import ts from 'typescript';
import { SemanticTokensLegend, SemanticTokenModifiers, SemanticTokenTypes } from 'vscode-languageserver';
import { RuntimeLibrary } from '../../services/dependencyService';
import { SemanticTokenOffsetData } from '../../types';

/* tslint:disable:max-line-length */
/**
 * extended from https://github.com/microsoft/TypeScript/blob/35c8df04ad959224fad9037e340c1e50f0540a49/src/services/classifier2020.ts#L9
 * so that we don't have to map it into our own legend
 */
export const enum TokenType {
  class,
  enum,
  interface,
  namespace,
  typeParameter,
  type,
  parameter,
  variable,
  enumMember,
  property,
  function,
  member
}

/* tslint:disable:max-line-length */
/**
 * adapted from https://github.com/microsoft/TypeScript/blob/35c8df04ad959224fad9037e340c1e50f0540a49/src/services/classifier2020.ts#L13
 * so that we don't have to map it into our own legend
 */
export const enum TokenModifier {
  declaration,
  static,
  async,
  readonly,
  defaultLibrary,
  local,

  // vue composition api
  refValue
}

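/**
 * Builds the semantic tokens legend sent to the client. Each array is indexed
 * by the corresponding const enum value above, so the numeric token data produced
 * by the TypeScript classifier maps onto the legend without any extra translation.
 */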
export function getSemanticTokenLegends(): SemanticTokensLegend {
  const tokenModifiers: string[] = [];

  ([
    [TokenModifier.declaration, SemanticTokenModifiers.declaration],
    [TokenModifier.static, SemanticTokenModifiers.static],
    [TokenModifier.async, SemanticTokenModifiers.async],
    [TokenModifier.readonly, SemanticTokenModifiers.readonly],
    [TokenModifier.defaultLibrary, SemanticTokenModifiers.defaultLibrary],
    [TokenModifier.local, 'local'],

    // vue
    [TokenModifier.refValue, 'refValue']
  ] as const).forEach(([tsModifier, legend]) => (tokenModifiers[tsModifier] = legend));

  const tokenTypes: string[] = [];

  ([
    [TokenType.class, SemanticTokenTypes.class],
    [TokenType.enum, SemanticTokenTypes.enum],
    [TokenType.interface, SemanticTokenTypes.interface],
    [TokenType.namespace, SemanticTokenTypes.namespace],
    [TokenType.typeParameter, SemanticTokenTypes.typeParameter],
    [TokenType.type, SemanticTokenTypes.type],
    [TokenType.parameter, SemanticTokenTypes.parameter],
    [TokenType.variable, SemanticTokenTypes.variable],
    [TokenType.enumMember, SemanticTokenTypes.enumMember],
    [TokenType.property, SemanticTokenTypes.property],
    [TokenType.function, SemanticTokenTypes.function],

    // member is renamed to method in vscode codebase to match LSP default
    [TokenType.member, SemanticTokenTypes.method]
  ] as const).forEach(([tokenType, legend]) => (tokenTypes[tokenType] = legend));

  return {
    tokenModifiers,
    tokenTypes
  };
}

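/**
 * TypeScript's classifier2020 packs a classification as
 * ((tokenType + 1) << typeOffset) | modifierSet: the token type lives in the
 * upper bits and the modifier bit set in the lower eight bits. The helpers below
 * undo that packing. For example, a readonly property is encoded as
 * ((TokenType.property + 1) << 8) | (1 << TokenModifier.readonly) === 2568.
 */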
export function getTokenTypeFromClassification(tsClassification: number): number {
  return (tsClassification >> TokenEncodingConsts.typeOffset) - 1;
}

export function getTokenModifierFromClassification(tsClassification: number): number {
  return tsClassification & TokenEncodingConsts.modifierMask;
}

const enum TokenEncodingConsts {
  typeOffset = 8,
  modifierMask = (1 << typeOffset) - 1
}

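/**
 * Walks the checked source file and adds the `refValue` modifier to every
 * `.value` identifier whose owning object resolves to a composition-API `Ref`.
 * If a semantic token already exists at that range, the modifier is OR-ed into
 * its modifier set; otherwise a new `property` token is pushed.
 */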
export function addCompositionApiRefTokens(
  tsModule: RuntimeLibrary['typescript'],
  program: ts.Program,
  fileFsPath: string,
  exists: SemanticTokenOffsetData[]
): void {
  const sourceFile = program.getSourceFile(fileFsPath);

  if (!sourceFile) {
    return;
  }

  const typeChecker = program.getTypeChecker();

  walk(sourceFile, node => {
    // use the injected tsModule (the workspace's TypeScript) rather than the bundled `ts`
    if (!tsModule.isIdentifier(node) || node.text !== 'value' || !tsModule.isPropertyAccessExpression(node.parent)) {
      return;
    }
    const propertyAccess = node.parent;

    let parentSymbol = typeChecker.getTypeAtLocation(propertyAccess.expression).symbol;

    // primitives, unions and some other types carry no symbol
    if (!parentSymbol) {
      return;
    }

    if (parentSymbol.flags & tsModule.SymbolFlags.Alias) {
      parentSymbol = typeChecker.getAliasedSymbol(parentSymbol);
    }

    if (parentSymbol.name !== 'Ref') {
      return;
    }

    const start = node.getStart();
    const length = node.getWidth();
    const exist = exists.find(token => token.start === start && token.length === length);
    const encodedModifier = 1 << TokenModifier.refValue;

    if (exist) {
      exist.modifierSet |= encodedModifier;
    } else {
      exists.push({
        classificationType: TokenType.property,
        length,
        modifierSet: encodedModifier,
        start
      });
    }
  });
}

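/** Depth-first traversal that invokes the callback for every descendant of `node`. */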
function walk(node: ts.Node, callback: (node: ts.Node) => void) {
  node.forEachChild(child => {
    callback(child);
    walk(child, callback);
  });
}