|
1 | 1 | #include "tree_sitter/parser.h"
|
2 | 2 |
|
3 |
| -enum TokenType { |
4 |
| - TYPE_TOKEN |
5 |
| -}; |
| 3 | +enum TokenType { TYPE_TOKEN }; |
// Allocate per-parse scanner state. This scanner is stateless, so no
// allocation is performed and NULL is returned.
// Fix: `()` in a C function definition means "unspecified arguments"
// (pre-C23); `(void)` is the correct empty prototype.
void *tree_sitter_jsdoc_external_scanner_create(void) { return NULL; }
// Release state allocated by `create`. Stateless scanner: nothing to free.
void tree_sitter_jsdoc_external_scanner_destroy(void *payload) {}

// Persist scanner state into `buffer` between parses. Stateless, so zero
// bytes are written.
unsigned tree_sitter_jsdoc_external_scanner_serialize(void *payload, char *buffer) {
  return 0;
}

// Restore scanner state from a previously serialized buffer. Stateless no-op.
void tree_sitter_jsdoc_external_scanner_deserialize(void *payload, const char *buffer, unsigned length) {}
13 | 13 | // Scan to the next balanced `}` character.
|
14 | 14 | static bool scan_for_type(TSLexer *lexer) {
|
15 |
| - int stack = 0; |
16 |
| - while (true) { |
17 |
| - if (lexer->eof(lexer)) { |
18 |
| - return false; |
| 15 | + int stack = 0; |
| 16 | + while (true) { |
| 17 | + if (lexer->eof(lexer)) { |
| 18 | + return false; |
| 19 | + } |
| 20 | + switch (lexer->lookahead) { |
| 21 | + case '{': |
| 22 | + stack++; |
| 23 | + break; |
| 24 | + case '}': |
| 25 | + stack--; |
| 26 | + if (stack == -1) { |
| 27 | + return true; |
| 28 | + } |
| 29 | + break; |
| 30 | + case '\n': |
| 31 | + case '\0': // fallthrough |
| 32 | + // Something's gone wrong. |
| 33 | + return false; |
| 34 | + default:; |
| 35 | + } |
| 36 | + lexer->advance(lexer, false); |
19 | 37 | }
|
20 |
| - switch (lexer->lookahead) { |
21 |
| - case '{': |
22 |
| - stack++; |
23 |
| - break; |
24 |
| - case '}': |
25 |
| - stack--; |
26 |
| - if (stack == -1) { return true; } |
27 |
| - break; |
28 |
| - case '\n': |
29 |
| - case '\0': // fallthrough |
30 |
| - // Something's gone wrong. |
31 |
| - return false; |
32 |
| - default:; |
33 |
| - } |
34 |
| - lexer->advance(lexer, false); |
35 |
| - } |
36 | 38 | }
|
37 | 39 |
|
38 | 40 | bool tree_sitter_jsdoc_external_scanner_scan(void *payload, TSLexer *lexer, const bool *valid_symbols) {
|
39 |
| - if (valid_symbols[TYPE_TOKEN] && scan_for_type(lexer)) { |
40 |
| - lexer->result_symbol = TYPE_TOKEN; |
41 |
| - lexer->mark_end(lexer); |
42 |
| - return true; |
43 |
| - } |
| 41 | + if (valid_symbols[TYPE_TOKEN] && scan_for_type(lexer)) { |
| 42 | + lexer->result_symbol = TYPE_TOKEN; |
| 43 | + lexer->mark_end(lexer); |
| 44 | + return true; |
| 45 | + } |
44 | 46 |
|
45 |
| - return false; |
| 47 | + return false; |
46 | 48 | }
|
0 commit comments