@@ -4,7 +4,7 @@
 	sitter "github.com/smacker/go-tree-sitter"
 	"github.com/tliron/glsp"
 	protocol "github.com/tliron/glsp/protocol_3_16"
-	"github.com/vknabel/lithia/parser"
+	syntax "github.com/vknabel/tree-sitter-lithia"
 )
 
 func textDocumentSemanticTokensFull(context *glsp.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
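The import hunk above swaps the handwritten `lithia/parser` node-type constants for the `tree-sitter-lithia` grammar binding. For orientation, here is a minimal sketch of how such a binding is typically wired up with go-tree-sitter to produce the `fileParser.Tree` this handler queries; the package name, the `parseDocument` helper, and its `content` parameter are illustrative and not part of this change:

```go
package langsrv // assumed package name for the handler file

import (
	"context"

	sitter "github.com/smacker/go-tree-sitter"
	syntax "github.com/vknabel/tree-sitter-lithia"
)

// parseDocument is a hypothetical helper: it creates a tree-sitter parser,
// attaches the Lithia grammar, and parses the document bytes into the
// *sitter.Tree whose root node the semantic-tokens handler walks.
func parseDocument(ctx context.Context, content []byte) (*sitter.Tree, error) {
	parser := sitter.NewParser()
	parser.SetLanguage(syntax.GetLanguage())
	// A nil old tree requests a full, non-incremental parse.
	return parser.ParseCtx(ctx, nil, content)
}
```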
@@ -14,134 +14,169 @@ func textDocumentSemanticTokensFull(context *glsp.Context, params *protocol.Sema
 		return nil, err
 	}
 	rootNode := fileParser.Tree.RootNode()
-	tokens := highlightedTokensEntriesForNode(rootNode)
-	return &protocol.SemanticTokens{
-		Data: serializeHighlightedTokens(tokens),
-	}, nil
-}
+	highlightsQuery, err := sitter.NewQuery([]byte(`
+[
+  "func"
+  "let"
+  "enum"
+  "data"
+  "module"
+  "import"
+  "extern"
+  "type"
+] @keyword
+
+[
+  "=>"
+] @operator
+
+[
+  ","
+  "."
+] @punctuation
+
+[
+  "{"
+  "}"
+  "("
+  ")"
+  "["
+  "]"
+] @punctuation.bracket
+
+(binary_expression operator: (["*" "/" "+" "-" "==" "!=" ">=" ">" "<" "<=" "&&" "||"]) @operator) ; extract
+(unary_expression operator: (["!"]) @operator)
+
+(parameter_list (identifier) @variable.parameter)
+(number_literal) @constant.numeric
+(comment) @comment
+(function_declaration name: (identifier) @function)
+(let_declaration name: (identifier) @variable)
+(enum_declaration name: (identifier) @type.enum)
+(enum_case_reference) @type.case
+(data_declaration name: (identifier) @type.data)
+(data_property_function name: (identifier) @function)
+(data_property_value name: (identifier) @property)
+(extern_declaration
+  name: (identifier) @variable.builtin
+  !properties
+  !parameters)
+(extern_declaration
+  name: (identifier) @function.builtin
+  !properties)
+(extern_declaration
+  name: (identifier) @type.builtin
+  !parameters)
+(import_declaration name: (import_module) @variable.import)
+(import_members (identifier) @variable.import)
+(module_declaration name: (identifier) @variable.import)
+(complex_invocation_expression function: (identifier) @function)
+(simple_invocation_expression function: (identifier) @function)
+(string_literal) @string
+(escape_sequence) @string.special
+(type_expression type: (identifier) @type.enum)
+(type_case label: (identifier) @type.case)
+(simple_invocation_expression function: (member_access (member_identifier) @function @method))
+(complex_invocation_expression function: (member_access (member_identifier) @function @method))
+(member_identifier) @property
+
+(ERROR) @error
+(identifier) @variable
+`), syntax.GetLanguage())
+	if err != nil {
+		return nil, err
+	}
+	cursor := sitter.NewQueryCursor()
+	cursor.Exec(highlightsQuery, rootNode)
+	defer cursor.Close()
 
-func highlightedTokensEntriesForNode(node *sitter.Node) []highlightedToken {
 	tokens := make([]highlightedToken, 0)
-	childCount := int(node.ChildCount())
-	for i := 0; i < childCount; i++ {
-		child := node.Child(i)
-		switch child.Type() {
-		case parser.TYPE_NODE_MODULE_DECLARATION:
-			nameChild := child.ChildByFieldName("name")
-			if nameChild != nil {
-				tokens = append(tokens, highlightedToken{
-					line:           uint32(nameChild.StartPoint().Row),
-					column:         uint32(nameChild.StartPoint().Column),
-					length:         nameChild.EndByte() - nameChild.StartByte(),
-					tokenType:      token_namespace,
-					tokenModifiers: []tokenModifier{modifier_declaration},
-				})
+	for match, ok := cursor.NextMatch(); ok; match, ok = cursor.NextMatch() {
+		for _, capture := range match.Captures {
+			captureName := highlightsQuery.CaptureNameForId(capture.Index)
+			capturedNode := capture.Node
+			tokenType := tokenTypeForCaptureName(captureName)
+			if tokenType == nil {
+				continue
 			}
-			keywordChild := child.Child(0)
-			if keywordChild != nil {
-				tokens = append(tokens,
-					highlightedToken{
-						line:           uint32(keywordChild.StartPoint().Row),
-						column:         uint32(keywordChild.StartPoint().Column),
-						length:         keywordChild.EndByte() - keywordChild.StartByte(),
-						tokenType:      token_keyword,
-						tokenModifiers: nil,
-					},
-				)
-			}
-		case parser.TYPE_NODE_NUMBER_LITERAL:
-			tokens = append(tokens,
-				highlightedToken{
-					line:           uint32(child.StartPoint().Row),
-					column:         uint32(child.StartPoint().Column),
-					length:         child.EndByte() - child.StartByte(),
-					tokenType:      token_number,
-					tokenModifiers: nil,
-				},
-			)
-		case parser.TYPE_NODE_STRING_LITERAL:
+			tokenModifiers := tokenModifiersForCaptureName(captureName)
 			tokens = append(tokens, highlightedToken{
-				line:           uint32(child.StartPoint().Row),
-				column:         uint32(child.StartPoint().Column),
-				length:         child.EndByte() - child.StartByte(),
-				tokenType:      token_string,
-				tokenModifiers: nil,
+				line:           uint32(capturedNode.StartPoint().Row),
+				column:         uint32(capturedNode.StartPoint().Column),
+				length:         capturedNode.EndByte() - capturedNode.StartByte(),
+				tokenType:      *tokenType,
+				tokenModifiers: tokenModifiers,
 			})
-		case parser.TYPE_NODE_COMMENT:
-			tokens = append(tokens,
-				highlightedToken{
-					line:           uint32(child.StartPoint().Row),
-					column:         uint32(child.StartPoint().Column),
-					length:         child.EndByte() - child.StartByte(),
-					tokenType:      token_comment,
-					tokenModifiers: nil,
-				},
-			)
-		case parser.TYPE_NODE_DATA_DECLARATION:
-			keywordChild := child.Child(0)
-			if keywordChild != nil {
-				tokens = append(tokens,
-					highlightedToken{
-						line:           uint32(keywordChild.StartPoint().Row),
-						column:         uint32(keywordChild.StartPoint().Column),
-						length:         keywordChild.EndByte() - keywordChild.StartByte(),
-						tokenType:      token_keyword,
-						tokenModifiers: nil,
-					},
-				)
-			}
-			nameChild := child.ChildByFieldName("name")
-			if nameChild != nil {
-				tokens = append(tokens, highlightedToken{
-					line:           uint32(nameChild.StartPoint().Row),
-					column:         uint32(nameChild.StartPoint().Column),
-					length:         nameChild.EndByte() - nameChild.StartByte(),
-					tokenType:      token_struct,
-					tokenModifiers: []tokenModifier{modifier_declaration},
-				})
-			}
-			tokens = append(tokens, highlightedTokensEntriesForNode(child)...)
-		case parser.TYPE_NODE_FUNCTION_DECLARATION:
-			keywordChild := child.Child(0)
-			if keywordChild != nil {
-				tokens = append(tokens,
-					highlightedToken{
-						line:           uint32(keywordChild.StartPoint().Row),
-						column:         uint32(keywordChild.StartPoint().Column),
-						length:         keywordChild.EndByte() - keywordChild.StartByte(),
-						tokenType:      token_keyword,
-						tokenModifiers: nil,
-					},
-				)
-			}
-			nameChild := child.ChildByFieldName("name")
-			if nameChild != nil {
-				tokens = append(tokens, highlightedToken{
-					line:           uint32(nameChild.StartPoint().Row),
-					column:         uint32(nameChild.StartPoint().Column),
-					length:         nameChild.EndByte() - nameChild.StartByte(),
-					tokenType:      token_function,
-					tokenModifiers: []tokenModifier{modifier_declaration},
-				})
-			}
-			tokens = append(tokens, highlightedTokensEntriesForNode(child)...)
-		case parser.TYPE_NODE_TYPE_EXPRESSION:
-			keywordChild := child.Child(0)
-			if keywordChild != nil {
-				tokens = append(tokens,
-					highlightedToken{
-						line:           uint32(keywordChild.StartPoint().Row),
-						column:         uint32(keywordChild.StartPoint().Column),
-						length:         keywordChild.EndByte() - keywordChild.StartByte(),
-						tokenType:      token_keyword,
-						tokenModifiers: nil,
-					},
-				)
-			}
-			tokens = append(tokens, highlightedTokensEntriesForNode(child)...)
-		default:
-			tokens = append(tokens, highlightedTokensEntriesForNode(child)...)
 		}
 	}
-	return tokens
+
+	return &protocol.SemanticTokens{
+		Data: serializeHighlightedTokens(tokens),
+	}, nil
+}
+
+func tokenTypeForCaptureName(captureName string) *tokenType {
+	switch captureName {
+	case "keyword":
+		return &token_keyword
+	case "operator":
+		return &token_operator
+	case "punctuation":
+		return &token_operator
+	case "punctuation.bracket":
+		return &token_operator
+	case "variable":
+		return nil
+	case "variable.parameter":
+		return &token_parameter
+	case "variable.builtin":
+		return &token_variable
+	case "variable.import":
+		return &token_namespace
+	case "constant.numeric":
+		return &token_number
+	case "comment":
+		return &token_comment
+	case "function":
+		return &token_function
+	case "function.builtin":
+		return &token_function
+	case "method":
+		return &token_method
+	case "type":
+		return &token_type
+	case "type.enum":
+		return &token_enum
+	case "type.case":
+		return &token_enumMember
+	case "type.data":
+		return &token_class
+	case "type.builtin":
+		return &token_type
+	case "property":
+		return &token_property
+	case "string":
+		return &token_string
+	case "string.special":
+		return &token_string
+	case "error":
+		return nil
+	default:
+		return nil
+	}
+}
+
+func tokenModifiersForCaptureName(captureName string) []tokenModifier {
+	switch captureName {
+	case "variable":
+		return []tokenModifier{modifier_readonly}
+	case "type.enum", "type.data":
+		return []tokenModifier{modifier_declaration}
+	case "variable.builtin", "function.builtin", "type.builtin":
+		return []tokenModifier{modifier_declaration, modifier_defaultLibrary, modifier_static, modifier_readonly}
+	case "string.special":
+		return []tokenModifier{modifier_modification}
+	default:
+		return nil
+	}
 }
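The handler still delegates the wire format to `serializeHighlightedTokens`, which is outside this diff. As a reference for what that step has to produce, here is a hedged sketch of the LSP 3.16 semantic-token encoding: tokens sorted by position, then five `uint32`s per token (delta line, delta start character, length, token-type index, modifier bit set). The `tokenType`, `tokenModifier`, and `highlightedToken` declarations below are stand-ins for the package's own definitions, and the function body is illustrative rather than the repository's implementation:

```go
package langsrv // assumed package name; declarations below are illustrative stand-ins

import "sort"

type tokenType uint32     // index into the server's semantic-token legend
type tokenModifier uint32 // index into the server's modifier legend

type highlightedToken struct {
	line, column, length uint32
	tokenType            tokenType
	tokenModifiers       []tokenModifier
}

// serializeHighlightedTokens sketches the LSP 3.16 "relative" encoding that
// SemanticTokens.Data expects: sort tokens by position, then emit
// (deltaLine, deltaStartChar, length, tokenType, modifierBits) for each one.
func serializeHighlightedTokens(tokens []highlightedToken) []uint32 {
	sort.Slice(tokens, func(i, j int) bool {
		if tokens[i].line != tokens[j].line {
			return tokens[i].line < tokens[j].line
		}
		return tokens[i].column < tokens[j].column
	})

	data := make([]uint32, 0, len(tokens)*5)
	var prevLine, prevColumn uint32
	for _, t := range tokens {
		deltaLine := t.line - prevLine
		deltaColumn := t.column
		if deltaLine == 0 {
			// Same line as the previous token: the start offset is relative too.
			deltaColumn = t.column - prevColumn
		}
		var modifierBits uint32
		for _, m := range t.tokenModifiers {
			modifierBits |= 1 << uint32(m) // assumes modifiers are legend indices
		}
		data = append(data, deltaLine, deltaColumn, t.length, uint32(t.tokenType), modifierBits)
		prevLine, prevColumn = t.line, t.column
	}
	return data
}
```

Note that the protocol measures token length in UTF-16 code units by default, whereas `EndByte() - StartByte()` in the handler counts bytes; the two agree only for ASCII source text.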