From 31f0de8db86214171888bd1d808e41f7250ca5f0 Mon Sep 17 00:00:00 2001 From: Valentin Knabel Date: Mon, 7 Feb 2022 23:09:47 +0100 Subject: [PATCH 1/3] feat: syntax highlighting without doc sync #28 --- .vscode/settings.json | 3 + CHANGELOG.md | 4 + ...dler-text-document-semantic-tokens-full.go | 283 ++++++++++-------- langsrv/semantic-highlighting.go | 26 +- 4 files changed, 179 insertions(+), 137 deletions(-) create mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..8481803 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "lithia.path": "/Users/vknabel/dev/lithia/lithia" +} diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ccf355..9f2e0ec 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ # Changelog +## v0.0.13-next + +- lsp: semantic syntax highlighting #28 + ## v0.0.12 - cli: new CLI interface, including, help and version diff --git a/langsrv/handler-text-document-semantic-tokens-full.go b/langsrv/handler-text-document-semantic-tokens-full.go index 9f2b0e4..81eaa81 100644 --- a/langsrv/handler-text-document-semantic-tokens-full.go +++ b/langsrv/handler-text-document-semantic-tokens-full.go @@ -4,7 +4,7 @@ import ( sitter "github.com/smacker/go-tree-sitter" "github.com/tliron/glsp" protocol "github.com/tliron/glsp/protocol_3_16" - "github.com/vknabel/lithia/parser" + syntax "github.com/vknabel/tree-sitter-lithia" ) func textDocumentSemanticTokensFull(context *glsp.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) { @@ -14,134 +14,169 @@ func textDocumentSemanticTokensFull(context *glsp.Context, params *protocol.Sema return nil, err } rootNode := fileParser.Tree.RootNode() - tokens := highlightedTokensEntriesForNode(rootNode) - return &protocol.SemanticTokens{ - Data: serializeHighlightedTokens(tokens), - }, nil -} + highlightsQuery, err := sitter.NewQuery([]byte(` + [ + "func" + "let" + "enum" + "data" + "module" + "import" + "extern" + "type" + ] @keyword + + [ + "=>" + ] @operator + + [ + "," + "." 
+ ] @punctuation + + [ + "{" + "}" + "(" + ")" + "[" + "]" + ] @punctuation.bracket + + (binary_expression operator: (["*" "/" "+" "-" "==" "!=" ">=" ">" "<" "<=" "&&" "||"]) @operator) ; extract + (unary_expression operator: (["!"]) @operator) + + (parameter_list (identifier) @variable.parameter) + (number_literal) @constant.numeric + (comment) @comment + (function_declaration name: (identifier) @function) + (let_declaration name: (identifier) @variable) + (enum_declaration name: (identifier) @type.enum) + (enum_case_reference) @type.case + (data_declaration name: (identifier) @type.data) + (data_property_function name: (identifier) @function) + (data_property_value name: (identifier) @property) + (extern_declaration + name: (identifier) @variable.builtin + !properties + !parameters) + (extern_declaration + name: (identifier) @function.builtin + !properties) + (extern_declaration + name: (identifier) @type.builtin + !parameters) + (import_declaration name: (import_module) @variable.import) + (import_members (identifier) @variable.import) + (module_declaration name: (identifier) @variable.import) + (complex_invocation_expression function: (identifier) @function) + (simple_invocation_expression function: (identifier) @function) + (string_literal) @string + (escape_sequence) @string.special + (type_expression type: (identifier) @type.enum) + (type_case label: (identifier) @type.case) + (simple_invocation_expression function: (member_access (member_identifier) @function @method)) + (complex_invocation_expression function: (member_access (member_identifier) @function @method)) + (member_identifier) @property + + (ERROR) @error + (identifier) @variable + `), syntax.GetLanguage()) + if err != nil { + return nil, err + } + cursor := sitter.NewQueryCursor() + cursor.Exec(highlightsQuery, rootNode) + defer cursor.Close() -func highlightedTokensEntriesForNode(node *sitter.Node) []highlightedToken { tokens := make([]highlightedToken, 0) - childCount := int(node.ChildCount()) - for i := 0; i < childCount; i++ { - child := node.Child(i) - switch child.Type() { - case parser.TYPE_NODE_MODULE_DECLARATION: - nameChild := child.ChildByFieldName("name") - if nameChild != nil { - tokens = append(tokens, highlightedToken{ - line: uint32(nameChild.StartPoint().Row), - column: uint32(nameChild.StartPoint().Column), - length: nameChild.EndByte() - nameChild.StartByte(), - tokenType: token_namespace, - tokenModifiers: []tokenModifier{modifier_declaration}, - }) + for match, ok := cursor.NextMatch(); ok; match, ok = cursor.NextMatch() { + for _, capture := range match.Captures { + captureName := highlightsQuery.CaptureNameForId(capture.Index) + capturedNode := capture.Node + tokenType := tokenTypeForCaptureName(captureName) + if tokenType == nil { + continue } - keywordChild := child.Child(0) - if keywordChild != nil { - tokens = append(tokens, - highlightedToken{ - line: uint32(keywordChild.StartPoint().Row), - column: uint32(keywordChild.StartPoint().Column), - length: keywordChild.EndByte() - keywordChild.StartByte(), - tokenType: token_keyword, - tokenModifiers: nil, - }, - ) - } - case parser.TYPE_NODE_NUMBER_LITERAL: - tokens = append(tokens, - highlightedToken{ - line: uint32(child.StartPoint().Row), - column: uint32(child.StartPoint().Column), - length: child.EndByte() - child.StartByte(), - tokenType: token_number, - tokenModifiers: nil, - }, - ) - case parser.TYPE_NODE_STRING_LITERAL: + tokenModifiers := tokenModifiersForCaptureName(captureName) tokens = append(tokens, highlightedToken{ - line: 
uint32(child.StartPoint().Row), - column: uint32(child.StartPoint().Column), - length: child.EndByte() - child.StartByte(), - tokenType: token_string, - tokenModifiers: nil, + line: uint32(capturedNode.StartPoint().Row), + column: uint32(capturedNode.StartPoint().Column), + length: capturedNode.EndByte() - capturedNode.StartByte(), + tokenType: *tokenType, + tokenModifiers: tokenModifiers, }) - case parser.TYPE_NODE_COMMENT: - tokens = append(tokens, - highlightedToken{ - line: uint32(child.StartPoint().Row), - column: uint32(child.StartPoint().Column), - length: child.EndByte() - child.StartByte(), - tokenType: token_comment, - tokenModifiers: nil, - }, - ) - case parser.TYPE_NODE_DATA_DECLARATION: - keywordChild := child.Child(0) - if keywordChild != nil { - tokens = append(tokens, - highlightedToken{ - line: uint32(keywordChild.StartPoint().Row), - column: uint32(keywordChild.StartPoint().Column), - length: keywordChild.EndByte() - keywordChild.StartByte(), - tokenType: token_keyword, - tokenModifiers: nil, - }, - ) - } - nameChild := child.ChildByFieldName("name") - if nameChild != nil { - tokens = append(tokens, highlightedToken{ - line: uint32(nameChild.StartPoint().Row), - column: uint32(nameChild.StartPoint().Column), - length: nameChild.EndByte() - nameChild.StartByte(), - tokenType: token_struct, - tokenModifiers: []tokenModifier{modifier_declaration}, - }) - } - tokens = append(tokens, highlightedTokensEntriesForNode(child)...) - case parser.TYPE_NODE_FUNCTION_DECLARATION: - keywordChild := child.Child(0) - if keywordChild != nil { - tokens = append(tokens, - highlightedToken{ - line: uint32(keywordChild.StartPoint().Row), - column: uint32(keywordChild.StartPoint().Column), - length: keywordChild.EndByte() - keywordChild.StartByte(), - tokenType: token_keyword, - tokenModifiers: nil, - }, - ) - } - nameChild := child.ChildByFieldName("name") - if nameChild != nil { - tokens = append(tokens, highlightedToken{ - line: uint32(nameChild.StartPoint().Row), - column: uint32(nameChild.StartPoint().Column), - length: nameChild.EndByte() - nameChild.StartByte(), - tokenType: token_function, - tokenModifiers: []tokenModifier{modifier_declaration}, - }) - } - tokens = append(tokens, highlightedTokensEntriesForNode(child)...) - case parser.TYPE_NODE_TYPE_EXPRESSION: - keywordChild := child.Child(0) - if keywordChild != nil { - tokens = append(tokens, - highlightedToken{ - line: uint32(keywordChild.StartPoint().Row), - column: uint32(keywordChild.StartPoint().Column), - length: keywordChild.EndByte() - keywordChild.StartByte(), - tokenType: token_keyword, - tokenModifiers: nil, - }, - ) - } - tokens = append(tokens, highlightedTokensEntriesForNode(child)...) - default: - tokens = append(tokens, highlightedTokensEntriesForNode(child)...) 
} } - return tokens + + return &protocol.SemanticTokens{ + Data: serializeHighlightedTokens(tokens), + }, nil +} + +func tokenTypeForCaptureName(captureName string) *tokenType { + switch captureName { + case "keyword": + return &token_keyword + case "operator": + return &token_operator + case "punctuation": + return &token_operator + case "punctuation.bracket": + return &token_operator + case "variable": + return nil + case "variable.parameter": + return &token_parameter + case "variable.builtin": + return &token_variable + case "variable.import": + return &token_namespace + case "constant.numeric": + return &token_number + case "comment": + return &token_comment + case "function": + return &token_function + case "function.builtin": + return &token_function + case "method": + return &token_method + case "type": + return &token_type + case "type.enum": + return &token_enum + case "type.case": + return &token_enumMember + case "type.data": + return &token_class + case "type.builtin": + return &token_type + case "property": + return &token_property + case "string": + return &token_string + case "string.special": + return &token_string + case "error": + return nil + default: + return nil + } +} + +func tokenModifiersForCaptureName(captureName string) []tokenModifier { + switch captureName { + case "variable": + return []tokenModifier{modifier_readonly} + case "type.enum", "type.data": + return []tokenModifier{modifier_declaration} + case "variable.builtin", "function.builtin", "type.builtin": + return []tokenModifier{modifier_declaration, modifier_defaultLibrary, modifier_static, modifier_readonly} + case "string.special": + return []tokenModifier{modifier_modification} + default: + return nil + } } diff --git a/langsrv/semantic-highlighting.go b/langsrv/semantic-highlighting.go index 414c70c..dd36e66 100644 --- a/langsrv/semantic-highlighting.go +++ b/langsrv/semantic-highlighting.go @@ -82,11 +82,11 @@ var allTokenTypes = []tokenType{ func (tt tokenType) bitflag() protocol.UInteger { switch tt { case token_namespace: - return 1 + return 0 case token_class: - return 2 + return 1 case token_enum: - return 3 + return 2 case token_interface: return 3 case token_struct: @@ -173,25 +173,25 @@ var allTokenModifiers = []tokenModifier{ func (tm tokenModifier) bitflag() protocol.UInteger { switch tm { case modifier_declaration: - return 1 + return 0b0000000001 case modifier_definition: - return 2 + return 0b0000000010 case modifier_readonly: - return 3 + return 0b0000000100 case modifier_static: - return 4 + return 0b0000001000 case modifier_deprecated: - return 5 + return 0b0000010000 case modifier_abstract: - return 6 + return 0b0000100000 case modifier_async: - return 7 + return 0b0001000000 case modifier_modification: - return 8 + return 0b0010000000 case modifier_documentation: - return 9 + return 0b0100000000 case modifier_defaultLibrary: - return 10 + return 0b1000000000 default: return 0 } From 659302e0c7f2a84c56107f9d7ee6f154fdaeaabc Mon Sep 17 00:00:00 2001 From: Valentin Knabel Date: Tue, 8 Feb 2022 00:06:57 +0100 Subject: [PATCH 2/3] fix: highlighting issues while typing #28 --- langsrv/document-cache.go | 18 +++++++++ langsrv/handler-text-document-did-change.go | 37 +++++++++++++++++++ langsrv/handler-text-document-did-open.go | 28 ++++++++++++++ ...dler-text-document-semantic-tokens-full.go | 9 ++--- langsrv/lang-server.go | 28 ++++++++++---- 5 files changed, 107 insertions(+), 13 deletions(-) create mode 100644 langsrv/document-cache.go create mode 100644 
langsrv/handler-text-document-did-change.go create mode 100644 langsrv/handler-text-document-did-open.go diff --git a/langsrv/document-cache.go b/langsrv/document-cache.go new file mode 100644 index 0000000..9915e4e --- /dev/null +++ b/langsrv/document-cache.go @@ -0,0 +1,18 @@ +package langsrv + +import ( + protocol "github.com/tliron/glsp/protocol_3_16" + "github.com/vknabel/lithia/ast" + "github.com/vknabel/lithia/parser" +) + +type documentCache struct { + documents map[protocol.URI]*textDocumentEntry +} + +type textDocumentEntry struct { + item protocol.TextDocumentItem + parser *parser.Parser + fileParser *parser.FileParser + sourceFile *ast.SourceFile +} diff --git a/langsrv/handler-text-document-did-change.go b/langsrv/handler-text-document-did-change.go new file mode 100644 index 0000000..044413c --- /dev/null +++ b/langsrv/handler-text-document-did-change.go @@ -0,0 +1,37 @@ +package langsrv + +import ( + "github.com/tliron/glsp" + protocol "github.com/tliron/glsp/protocol_3_16" + "github.com/vknabel/lithia/parser" +) + +func textDocumentDidChange(context *glsp.Context, params *protocol.DidChangeTextDocumentParams) error { + entry := langserver.documentCache.documents[params.TextDocument.URI] + text := entry.item.Text + for _, event := range params.ContentChanges { + switch e := event.(type) { + case protocol.TextDocumentContentChangeEvent: + langserver.server.Log.Infof("from: %s", text) + text = text[:e.Range.Start.IndexIn(text)] + e.Text + text[e.Range.End.IndexIn(text):] + langserver.server.Log.Infof("to: %s", text) + case protocol.TextDocumentContentChangeEventWhole: + text = e.Text + } + } + entry.item.Text = text + fileParser, errs := entry.parser.Parse("default-module", string(params.TextDocument.URI), text) + if len(errs) > 0 { + // TODO: syntax errors + return parser.NewGroupedSyntaxError(errs) + } + sourceFile, errs := fileParser.ParseSourceFile() + if len(errs) > 0 { + // TODO: syntax errors + return parser.NewGroupedSyntaxError(errs) + } + langserver.server.Log.Infof("%s: %s", params.TextDocument.URI, text) + langserver.documentCache.documents[params.TextDocument.URI].fileParser = fileParser + langserver.documentCache.documents[params.TextDocument.URI].sourceFile = sourceFile + return nil +} diff --git a/langsrv/handler-text-document-did-open.go b/langsrv/handler-text-document-did-open.go new file mode 100644 index 0000000..14227f1 --- /dev/null +++ b/langsrv/handler-text-document-did-open.go @@ -0,0 +1,28 @@ +package langsrv + +import ( + "github.com/tliron/glsp" + protocol "github.com/tliron/glsp/protocol_3_16" + "github.com/vknabel/lithia/parser" +) + +func textDocumentDidOpen(context *glsp.Context, params *protocol.DidOpenTextDocumentParams) error { + lithiaParser := parser.NewParser() + fileParser, errs := lithiaParser.Parse("default-module", string(params.TextDocument.URI), params.TextDocument.Text) + if len(errs) > 0 { + // TODO: syntax errors + return parser.NewGroupedSyntaxError(errs) + } + sourceFile, errs := fileParser.ParseSourceFile() + if len(errs) > 0 { + // TODO: syntax errors + return parser.NewGroupedSyntaxError(errs) + } + langserver.documentCache.documents[params.TextDocument.URI] = &textDocumentEntry{ + item: params.TextDocument, + parser: lithiaParser, + fileParser: fileParser, + sourceFile: sourceFile, + } + return nil +} diff --git a/langsrv/handler-text-document-semantic-tokens-full.go b/langsrv/handler-text-document-semantic-tokens-full.go index 81eaa81..eee58c1 100644 --- a/langsrv/handler-text-document-semantic-tokens-full.go +++ 
b/langsrv/handler-text-document-semantic-tokens-full.go @@ -8,12 +8,11 @@ import ( ) func textDocumentSemanticTokensFull(context *glsp.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) { - rc := NewReqContext(params.TextDocument) - fileParser, err := rc.createFileParser() - if err != nil && fileParser == nil { - return nil, err + entry := langserver.documentCache.documents[params.TextDocument.URI] + if entry == nil { + return nil, nil } - rootNode := fileParser.Tree.RootNode() + rootNode := entry.fileParser.Tree.RootNode() highlightsQuery, err := sitter.NewQuery([]byte(` [ "func" diff --git a/langsrv/lang-server.go b/langsrv/lang-server.go index a92aabb..3ab422d 100644 --- a/langsrv/lang-server.go +++ b/langsrv/lang-server.go @@ -12,6 +12,15 @@ var lsName = "lithia" var debug = true var handler protocol.Handler +type lithiaLangserver struct { + server *server.Server + documentCache *documentCache +} + +var langserver lithiaLangserver = lithiaLangserver{ + documentCache: &documentCache{documents: make(map[protocol.URI]*textDocumentEntry)}, +} + func init() { logging.Configure(1, nil) @@ -21,6 +30,9 @@ func init() { Shutdown: shutdown, SetTrace: setTrace, + TextDocumentDidOpen: textDocumentDidOpen, + TextDocumentDidChange: textDocumentDidChange, + TextDocumentHover: textDocumentHover, TextDocumentCompletion: textDocumentCompletion, TextDocumentDefinition: textDocumentDefinition, @@ -32,21 +44,21 @@ func init() { } func RunStdio() error { - server := server.NewServer(&handler, lsName, debug) - return server.RunStdio() + langserver.server = server.NewServer(&handler, lsName, debug) + return langserver.server.RunStdio() } func RunIPC() error { - server := server.NewServer(&handler, lsName, debug) - return server.RunNodeJs() + langserver.server = server.NewServer(&handler, lsName, debug) + return langserver.server.RunNodeJs() } func RunSocket(address string) error { - server := server.NewServer(&handler, lsName, debug) - return server.RunWebSocket(address) + langserver.server = server.NewServer(&handler, lsName, debug) + return langserver.server.RunWebSocket(address) } func RunTCP(address string) error { - server := server.NewServer(&handler, lsName, debug) - return server.RunTCP(address) + langserver.server = server.NewServer(&handler, lsName, debug) + return langserver.server.RunTCP(address) } From 5b10b0f3fa032596cafedc70ec9471162904f205 Mon Sep 17 00:00:00 2001 From: Valentin Knabel Date: Tue, 8 Feb 2022 00:25:36 +0100 Subject: [PATCH 3/3] feat: lsp diagnostics #29 --- CHANGELOG.md | 3 +- langsrv/handler-text-document-did-change.go | 13 +++--- langsrv/handler-text-document-did-open.go | 9 ++-- langsrv/publish-diagnostics.go | 48 +++++++++++++++++++++ 4 files changed, 60 insertions(+), 13 deletions(-) create mode 100644 langsrv/publish-diagnostics.go diff --git a/CHANGELOG.md b/CHANGELOG.md index 9f2e0ec..e430f9b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,9 @@ # Changelog -## v0.0.13-next +## v0.0.13 - lsp: semantic syntax highlighting #28 +- lsp: diagnostics #29 ## v0.0.12 diff --git a/langsrv/handler-text-document-did-change.go b/langsrv/handler-text-document-did-change.go index 044413c..39039f5 100644 --- a/langsrv/handler-text-document-did-change.go +++ b/langsrv/handler-text-document-did-change.go @@ -3,7 +3,6 @@ package langsrv import ( "github.com/tliron/glsp" protocol "github.com/tliron/glsp/protocol_3_16" - "github.com/vknabel/lithia/parser" ) func textDocumentDidChange(context *glsp.Context, params *protocol.DidChangeTextDocumentParams) 
error { @@ -12,9 +11,7 @@ func textDocumentDidChange(context *glsp.Context, params *protocol.DidChangeText for _, event := range params.ContentChanges { switch e := event.(type) { case protocol.TextDocumentContentChangeEvent: - langserver.server.Log.Infof("from: %s", text) text = text[:e.Range.Start.IndexIn(text)] + e.Text + text[e.Range.End.IndexIn(text):] - langserver.server.Log.Infof("to: %s", text) case protocol.TextDocumentContentChangeEventWhole: text = e.Text } @@ -22,16 +19,16 @@ func textDocumentDidChange(context *glsp.Context, params *protocol.DidChangeText entry.item.Text = text fileParser, errs := entry.parser.Parse("default-module", string(params.TextDocument.URI), text) if len(errs) > 0 { - // TODO: syntax errors - return parser.NewGroupedSyntaxError(errs) + publishSyntaxErrorDiagnostics(context, params.TextDocument.URI, uint32(params.TextDocument.Version), errs) + return nil } sourceFile, errs := fileParser.ParseSourceFile() if len(errs) > 0 { - // TODO: syntax errors - return parser.NewGroupedSyntaxError(errs) + publishSyntaxErrorDiagnostics(context, params.TextDocument.URI, uint32(params.TextDocument.Version), errs) + return nil } - langserver.server.Log.Infof("%s: %s", params.TextDocument.URI, text) langserver.documentCache.documents[params.TextDocument.URI].fileParser = fileParser langserver.documentCache.documents[params.TextDocument.URI].sourceFile = sourceFile + publishSyntaxErrorDiagnostics(context, params.TextDocument.URI, uint32(params.TextDocument.Version), nil) return nil } diff --git a/langsrv/handler-text-document-did-open.go b/langsrv/handler-text-document-did-open.go index 14227f1..ead40e8 100644 --- a/langsrv/handler-text-document-did-open.go +++ b/langsrv/handler-text-document-did-open.go @@ -10,13 +10,13 @@ func textDocumentDidOpen(context *glsp.Context, params *protocol.DidOpenTextDocu lithiaParser := parser.NewParser() fileParser, errs := lithiaParser.Parse("default-module", string(params.TextDocument.URI), params.TextDocument.Text) if len(errs) > 0 { - // TODO: syntax errors - return parser.NewGroupedSyntaxError(errs) + publishSyntaxErrorDiagnostics(context, params.TextDocument.URI, uint32(params.TextDocument.Version), errs) + return nil } sourceFile, errs := fileParser.ParseSourceFile() if len(errs) > 0 { - // TODO: syntax errors - return parser.NewGroupedSyntaxError(errs) + publishSyntaxErrorDiagnostics(context, params.TextDocument.URI, uint32(params.TextDocument.Version), errs) + return nil } langserver.documentCache.documents[params.TextDocument.URI] = &textDocumentEntry{ item: params.TextDocument, @@ -24,5 +24,6 @@ func textDocumentDidOpen(context *glsp.Context, params *protocol.DidOpenTextDocu fileParser: fileParser, sourceFile: sourceFile, } + publishSyntaxErrorDiagnostics(context, params.TextDocument.URI, uint32(params.TextDocument.Version), nil) return nil } diff --git a/langsrv/publish-diagnostics.go b/langsrv/publish-diagnostics.go new file mode 100644 index 0000000..dc9286d --- /dev/null +++ b/langsrv/publish-diagnostics.go @@ -0,0 +1,48 @@ +package langsrv + +import ( + "github.com/tliron/glsp" + protocol "github.com/tliron/glsp/protocol_3_16" + "github.com/vknabel/lithia/parser" +) + +func publishSyntaxErrorDiagnostics(context *glsp.Context, textDocumentURI protocol.URI, version uint32, errs []parser.SyntaxError) { + diagnostics := make([]protocol.Diagnostic, len(errs)) + for i, err := range errs { + diagnostics[i] = syntaxErrorToDiagnostic(err) + } + + versionRef := version + context.Notify(protocol.ServerTextDocumentPublishDiagnostics, 
protocol.PublishDiagnosticsParams{ + URI: textDocumentURI, + Version: &versionRef, + Diagnostics: diagnostics, + }) +} + +func syntaxErrorToDiagnostic(err parser.SyntaxError) protocol.Diagnostic { + return protocol.Diagnostic{ + Source: &lsName, + Range: rangeFromSourceLocation(err.SourceLocation), + Severity: newSeverityRef(protocol.DiagnosticSeverityError), + Message: err.Message, + } +} + +func rangeFromSourceLocation(location parser.SourceLocation) protocol.Range { + return protocol.Range{ + Start: protocol.Position{ + Line: location.Node.StartPoint().Row, + Character: location.Node.StartPoint().Column, + }, + End: protocol.Position{ + Line: location.Node.EndPoint().Row, + Character: location.Node.EndPoint().Column, + }, + } +} + +func newSeverityRef(sev protocol.DiagnosticSeverity) *protocol.DiagnosticSeverity { + severity := sev + return &severity +}
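
Note on the bitflag() changes above: LSP 3.16 treats a token's type as a zero-based index into the legend sent during initialize, and its modifiers as a bit set in which the modifier at legend index i contributes 1 << i. That is why token_namespace now maps to 0 and the modifier values become powers of two that can be OR-combined on a single token. A minimal sketch of the resulting wire format follows; serializeHighlightedTokens itself is not part of this diff, so the snippet is illustrative only, and every name in it besides the index and bit values quoted from the diff is assumed.

package main

import "fmt"

func main() {
	// Modifiers combine as a bit set, e.g. declaration | readonly,
	// using the values from the patched bitflag() table above.
	modifiers := uint32(0b0000000001 | 0b0000000100)

	// Each token is serialized as five uints:
	// deltaLine, deltaStartChar, length, tokenType index, modifier bit set.
	// Line and start character are relative to the previous token; the start
	// character is only relative when both tokens share a line.
	data := []uint32{
		0, 0, 4, 0, 0,         // token at 0:0, length 4, token_namespace (index 0), no modifiers
		2, 3, 5, 2, modifiers, // token two lines later at column 3, length 5, token_enum (index 2)
	}
	fmt.Println(data)
}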