Commit 31f0de8

feat: syntax highlighting without doc sync #28

1 parent e21e7a6 commit 31f0de8

File tree (4 files changed: +179 -137 lines changed)

- .vscode/settings.json
- CHANGELOG.md
- langsrv/handler-text-document-semantic-tokens-full.go
- langsrv/semantic-highlighting.go


.vscode/settings.json (+3)

@@ -0,0 +1,3 @@
+{
+  "lithia.path": "/Users/vknabel/dev/lithia/lithia"
+}

CHANGELOG.md (+4)

@@ -1,5 +1,9 @@
 # Changelog
 
+## v0.0.13-next
+
+- lsp: semantic syntax highlighting #28
+
 ## v0.0.12
 
 - cli: new CLI interface, including, help and version

langsrv/handler-text-document-semantic-tokens-full.go (+159 -124)

@@ -4,7 +4,7 @@ import (
     sitter "github.com/smacker/go-tree-sitter"
     "github.com/tliron/glsp"
     protocol "github.com/tliron/glsp/protocol_3_16"
-    "github.com/vknabel/lithia/parser"
+    syntax "github.com/vknabel/tree-sitter-lithia"
 )
 
 func textDocumentSemanticTokensFull(context *glsp.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
@@ -14,134 +14,169 @@ func textDocumentSemanticTokensFull(context *glsp.Context, params *protocol.Sema
         return nil, err
     }
     rootNode := fileParser.Tree.RootNode()
-    tokens := highlightedTokensEntriesForNode(rootNode)
-    return &protocol.SemanticTokens{
-        Data: serializeHighlightedTokens(tokens),
-    }, nil
-}
+    highlightsQuery, err := sitter.NewQuery([]byte(`
+[
+  "func"
+  "let"
+  "enum"
+  "data"
+  "module"
+  "import"
+  "extern"
+  "type"
+] @keyword
+
+[
+  "=>"
+] @operator
+
+[
+  ","
+  "."
+] @punctuation
+
+[
+  "{"
+  "}"
+  "("
+  ")"
+  "["
+  "]"
+] @punctuation.bracket
+
+(binary_expression operator: (["*" "/" "+" "-" "==" "!=" ">=" ">" "<" "<=" "&&" "||"]) @operator) ; extract
+(unary_expression operator: (["!"]) @operator)
+
+(parameter_list (identifier) @variable.parameter)
+(number_literal) @constant.numeric
+(comment) @comment
+(function_declaration name: (identifier) @function)
+(let_declaration name: (identifier) @variable)
+(enum_declaration name: (identifier) @type.enum)
+(enum_case_reference) @type.case
+(data_declaration name: (identifier) @type.data)
+(data_property_function name: (identifier) @function)
+(data_property_value name: (identifier) @property)
+(extern_declaration
+  name: (identifier) @variable.builtin
+  !properties
+  !parameters)
+(extern_declaration
+  name: (identifier) @function.builtin
+  !properties)
+(extern_declaration
+  name: (identifier) @type.builtin
+  !parameters)
+(import_declaration name: (import_module) @variable.import)
+(import_members (identifier) @variable.import)
+(module_declaration name: (identifier) @variable.import)
+(complex_invocation_expression function: (identifier) @function)
+(simple_invocation_expression function: (identifier) @function)
+(string_literal) @string
+(escape_sequence) @string.special
+(type_expression type: (identifier) @type.enum)
+(type_case label: (identifier) @type.case)
+(simple_invocation_expression function: (member_access (member_identifier) @function @method))
+(complex_invocation_expression function: (member_access (member_identifier) @function @method))
+(member_identifier) @property
+
+(ERROR) @error
+(identifier) @variable
+`), syntax.GetLanguage())
+    if err != nil {
+        return nil, err
+    }
+    cursor := sitter.NewQueryCursor()
+    cursor.Exec(highlightsQuery, rootNode)
+    defer cursor.Close()
 
-func highlightedTokensEntriesForNode(node *sitter.Node) []highlightedToken {
     tokens := make([]highlightedToken, 0)
-    childCount := int(node.ChildCount())
-    for i := 0; i < childCount; i++ {
-        child := node.Child(i)
-        switch child.Type() {
-        case parser.TYPE_NODE_MODULE_DECLARATION:
-            nameChild := child.ChildByFieldName("name")
-            if nameChild != nil {
-                tokens = append(tokens, highlightedToken{
-                    line: uint32(nameChild.StartPoint().Row),
-                    column: uint32(nameChild.StartPoint().Column),
-                    length: nameChild.EndByte() - nameChild.StartByte(),
-                    tokenType: token_namespace,
-                    tokenModifiers: []tokenModifier{modifier_declaration},
-                })
+    for match, ok := cursor.NextMatch(); ok; match, ok = cursor.NextMatch() {
+        for _, capture := range match.Captures {
+            captureName := highlightsQuery.CaptureNameForId(capture.Index)
+            capturedNode := capture.Node
+            tokenType := tokenTypeForCaptureName(captureName)
+            if tokenType == nil {
+                continue
             }
-            keywordChild := child.Child(0)
-            if keywordChild != nil {
-                tokens = append(tokens,
-                    highlightedToken{
-                        line: uint32(keywordChild.StartPoint().Row),
-                        column: uint32(keywordChild.StartPoint().Column),
-                        length: keywordChild.EndByte() - keywordChild.StartByte(),
-                        tokenType: token_keyword,
-                        tokenModifiers: nil,
-                    },
-                )
-            }
-        case parser.TYPE_NODE_NUMBER_LITERAL:
-            tokens = append(tokens,
-                highlightedToken{
-                    line: uint32(child.StartPoint().Row),
-                    column: uint32(child.StartPoint().Column),
-                    length: child.EndByte() - child.StartByte(),
-                    tokenType: token_number,
-                    tokenModifiers: nil,
-                },
-            )
-        case parser.TYPE_NODE_STRING_LITERAL:
+            tokenModifiers := tokenModifiersForCaptureName(captureName)
             tokens = append(tokens, highlightedToken{
-                line: uint32(child.StartPoint().Row),
-                column: uint32(child.StartPoint().Column),
-                length: child.EndByte() - child.StartByte(),
-                tokenType: token_string,
-                tokenModifiers: nil,
+                line: uint32(capturedNode.StartPoint().Row),
+                column: uint32(capturedNode.StartPoint().Column),
+                length: capturedNode.EndByte() - capturedNode.StartByte(),
+                tokenType: *tokenType,
+                tokenModifiers: tokenModifiers,
             })
-        case parser.TYPE_NODE_COMMENT:
-            tokens = append(tokens,
-                highlightedToken{
-                    line: uint32(child.StartPoint().Row),
-                    column: uint32(child.StartPoint().Column),
-                    length: child.EndByte() - child.StartByte(),
-                    tokenType: token_comment,
-                    tokenModifiers: nil,
-                },
-            )
-        case parser.TYPE_NODE_DATA_DECLARATION:
-            keywordChild := child.Child(0)
-            if keywordChild != nil {
-                tokens = append(tokens,
-                    highlightedToken{
-                        line: uint32(keywordChild.StartPoint().Row),
-                        column: uint32(keywordChild.StartPoint().Column),
-                        length: keywordChild.EndByte() - keywordChild.StartByte(),
-                        tokenType: token_keyword,
-                        tokenModifiers: nil,
-                    },
-                )
-            }
-            nameChild := child.ChildByFieldName("name")
-            if nameChild != nil {
-                tokens = append(tokens, highlightedToken{
-                    line: uint32(nameChild.StartPoint().Row),
-                    column: uint32(nameChild.StartPoint().Column),
-                    length: nameChild.EndByte() - nameChild.StartByte(),
-                    tokenType: token_struct,
-                    tokenModifiers: []tokenModifier{modifier_declaration},
-                })
-            }
-            tokens = append(tokens, highlightedTokensEntriesForNode(child)...)
-        case parser.TYPE_NODE_FUNCTION_DECLARATION:
-            keywordChild := child.Child(0)
-            if keywordChild != nil {
-                tokens = append(tokens,
-                    highlightedToken{
-                        line: uint32(keywordChild.StartPoint().Row),
-                        column: uint32(keywordChild.StartPoint().Column),
-                        length: keywordChild.EndByte() - keywordChild.StartByte(),
-                        tokenType: token_keyword,
-                        tokenModifiers: nil,
-                    },
-                )
-            }
-            nameChild := child.ChildByFieldName("name")
-            if nameChild != nil {
-                tokens = append(tokens, highlightedToken{
-                    line: uint32(nameChild.StartPoint().Row),
-                    column: uint32(nameChild.StartPoint().Column),
-                    length: nameChild.EndByte() - nameChild.StartByte(),
-                    tokenType: token_function,
-                    tokenModifiers: []tokenModifier{modifier_declaration},
-                })
-            }
-            tokens = append(tokens, highlightedTokensEntriesForNode(child)...)
-        case parser.TYPE_NODE_TYPE_EXPRESSION:
-            keywordChild := child.Child(0)
-            if keywordChild != nil {
-                tokens = append(tokens,
-                    highlightedToken{
-                        line: uint32(keywordChild.StartPoint().Row),
-                        column: uint32(keywordChild.StartPoint().Column),
-                        length: keywordChild.EndByte() - keywordChild.StartByte(),
-                        tokenType: token_keyword,
-                        tokenModifiers: nil,
-                    },
-                )
-            }
-            tokens = append(tokens, highlightedTokensEntriesForNode(child)...)
-        default:
-            tokens = append(tokens, highlightedTokensEntriesForNode(child)...)
         }
     }
-    return tokens
+
+    return &protocol.SemanticTokens{
+        Data: serializeHighlightedTokens(tokens),
+    }, nil
+}
+
+func tokenTypeForCaptureName(captureName string) *tokenType {
+    switch captureName {
+    case "keyword":
+        return &token_keyword
+    case "operator":
+        return &token_operator
+    case "punctuation":
+        return &token_operator
+    case "punctuation.bracket":
+        return &token_operator
+    case "variable":
+        return nil
+    case "variable.parameter":
+        return &token_parameter
+    case "variable.builtin":
+        return &token_variable
+    case "variable.import":
+        return &token_namespace
+    case "constant.numeric":
+        return &token_number
+    case "comment":
+        return &token_comment
+    case "function":
+        return &token_function
+    case "function.builtin":
+        return &token_function
+    case "method":
+        return &token_method
+    case "type":
+        return &token_type
+    case "type.enum":
+        return &token_enum
+    case "type.case":
+        return &token_enumMember
+    case "type.data":
+        return &token_class
+    case "type.builtin":
+        return &token_type
+    case "property":
+        return &token_property
+    case "string":
+        return &token_string
+    case "string.special":
+        return &token_string
+    case "error":
+        return nil
+    default:
+        return nil
+    }
+}
+
+func tokenModifiersForCaptureName(captureName string) []tokenModifier {
+    switch captureName {
+    case "variable":
+        return []tokenModifier{modifier_readonly}
+    case "type.enum", "type.data":
+        return []tokenModifier{modifier_declaration}
+    case "variable.builtin", "function.builtin", "type.builtin":
+        return []tokenModifier{modifier_declaration, modifier_defaultLibrary, modifier_static, modifier_readonly}
+    case "string.special":
+        return []tokenModifier{modifier_modification}
+    default:
+        return nil
+    }
 }
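Note on what happens next: the handler hands the collected tokens to serializeHighlightedTokens, which is not part of this diff. For orientation, LSP 3.16 expects the semantic-token payload as a flat array of five unsigned integers per token (deltaLine, deltaStartChar, length, tokenType index, tokenModifiers bitset), with positions encoded relative to the previous token. The sketch below is illustrative only: the struct mirrors the fields filled in above, but the simplified types, the legend indices, and the encodeSemanticTokens helper are stand-ins, not the package's actual implementation.

package main

import (
    "fmt"
    "sort"
)

// highlightedToken mirrors the fields the handler fills in above; the real
// struct lives in package langsrv and may differ in detail.
type highlightedToken struct {
    line, column, length uint32
    tokenType            uint32   // zero-based index into the token-type legend
    tokenModifiers       []uint32 // each entry is a single bit, as in bitflag()
}

// encodeSemanticTokens produces the flat LSP data array: five integers per
// token, with positions delta-encoded against the previous token.
func encodeSemanticTokens(tokens []highlightedToken) []uint32 {
    // The protocol requires tokens ordered by position before delta encoding.
    sort.Slice(tokens, func(i, j int) bool {
        if tokens[i].line != tokens[j].line {
            return tokens[i].line < tokens[j].line
        }
        return tokens[i].column < tokens[j].column
    })

    data := make([]uint32, 0, len(tokens)*5)
    var prevLine, prevColumn uint32
    for _, t := range tokens {
        deltaLine := t.line - prevLine
        deltaStart := t.column
        if deltaLine == 0 {
            // Same line as the previous token: start is relative to its start.
            deltaStart = t.column - prevColumn
        }
        var modifiers uint32
        for _, m := range t.tokenModifiers {
            modifiers |= m // single-bit flags combine into one bitset
        }
        data = append(data, deltaLine, deltaStart, t.length, t.tokenType, modifiers)
        prevLine, prevColumn = t.line, t.column
    }
    return data
}

func main() {
    // Two tokens on the same line, e.g. a keyword at column 0 and a function
    // name at column 5. The token-type indices here are illustrative only.
    tokens := []highlightedToken{
        {line: 0, column: 0, length: 4, tokenType: 20},
        {line: 0, column: 5, length: 5, tokenType: 9, tokenModifiers: []uint32{0b1}},
    }
    fmt.Println(encodeSemanticTokens(tokens)) // [0 0 4 20 0 0 5 5 9 1]
}

Because the encoding is relative, sorting by position matters; query captures may not arrive strictly in document order, so emitting them unsorted would corrupt positions on the client side.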

langsrv/semantic-highlighting.go (+13 -13)

@@ -82,11 +82,11 @@ var allTokenTypes = []tokenType{
 func (tt tokenType) bitflag() protocol.UInteger {
     switch tt {
     case token_namespace:
-        return 1
+        return 0
     case token_class:
-        return 2
+        return 1
     case token_enum:
-        return 3
+        return 2
     case token_interface:
         return 3
     case token_struct:
@@ -173,25 +173,25 @@ var allTokenModifiers = []tokenModifier{
 func (tm tokenModifier) bitflag() protocol.UInteger {
     switch tm {
     case modifier_declaration:
-        return 1
+        return 0b0000000001
     case modifier_definition:
-        return 2
+        return 0b0000000010
     case modifier_readonly:
-        return 3
+        return 0b0000000100
     case modifier_static:
-        return 4
+        return 0b0000001000
     case modifier_deprecated:
-        return 5
+        return 0b0000010000
     case modifier_abstract:
-        return 6
+        return 0b0000100000
     case modifier_async:
-        return 7
+        return 0b0001000000
     case modifier_modification:
-        return 8
+        return 0b0010000000
     case modifier_documentation:
-        return 9
+        return 0b0100000000
     case modifier_defaultLibrary:
-        return 10
+        return 0b1000000000
     default:
         return 0
     }
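Note on the two bitflag changes above: in the serialized token data, the token type is a zero-based index into the tokenTypes legend the server advertises, so token_namespace must map to 0 rather than 1; the token modifiers, by contrast, travel as a single bitset, so each tokenModifier needs its own power of two. With the old sequential values, combining modifiers (presumably via bitwise OR in serializeHighlightedTokens, which is not shown here) was ambiguous: declaration (1) OR definition (2) yields 3, indistinguishable from readonly (3). A small self-contained sketch with stand-in constants mirroring the new values:

package main

import "fmt"

// Stand-in constants mirroring the new bitflag() values in the diff above.
const (
    modifierDeclaration    uint32 = 0b0000000001
    modifierDefinition     uint32 = 0b0000000010
    modifierReadonly       uint32 = 0b0000000100
    modifierStatic         uint32 = 0b0000001000
    modifierDefaultLibrary uint32 = 0b1000000000
)

func main() {
    // The modifiers returned for builtin captures are OR-ed into one bitset.
    combined := modifierDeclaration | modifierDefaultLibrary | modifierStatic | modifierReadonly
    fmt.Printf("%010b\n", combined)               // 1000001101
    fmt.Println(combined&modifierReadonly != 0)   // true: the client sees "readonly"
    fmt.Println(combined&modifierDefinition != 0) // false: "definition" was never set
}

On the client side, each set bit is looked up in the tokenModifiers legend, so a value like 0b1000001101 decodes to declaration, readonly, static and defaultLibrary.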
