diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index c178ac6eaaa41..e53fc9d5e3559 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -406,7 +406,7 @@ stages:
- powershell: eng/build.ps1 -configuration Release -prepareMachine -ci -restore -binaryLogName Restore.binlog
displayName: Restore
- - powershell: eng/build.ps1 -configuration Release -prepareMachine -ci -build -pack -publish -sign -binaryLogName Build.binlog /p:DotnetPublishUsingPipelines=true
+ - powershell: eng/build.ps1 -configuration Release -prepareMachine -ci -build -pack -publish -sign -binaryLogName Build.binlog /p:DotnetPublishUsingPipelines=true /p:ContinuousIntegrationBuildCorrectness=true
displayName: Build
# While this task is not executed in the official build, this serves as a PR check for whether symbol exclusions
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 606dfbf18fac0..d5c956241773e 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -8,9 +8,9 @@
-
+
https://github.com/dotnet/source-build-reference-packages
- 5af41f88ed666b5bf29b732471f8b462d1e931de
+ b99d40df247e865fb0a9de15bdcfdfa7f550a55e
diff --git a/eng/pipelines/test-integration-helix.yml b/eng/pipelines/test-integration-helix.yml
index 606d675c72751..4709be392b8a2 100644
--- a/eng/pipelines/test-integration-helix.yml
+++ b/eng/pipelines/test-integration-helix.yml
@@ -33,6 +33,7 @@ stages:
buildArguments: -msbuildEngine vs /p:Projects='"$(Build.Repository.LocalPath)\src\VisualStudio\IntegrationTest\IntegrationTestBuildProject.csproj"'
- stage: ${{ parameters.configuration }}_Integration
+ condition: and(succeeded(), notIn(variables['System.PullRequest.TargetBranchName'], 'main-vs-deps', 'release/dev18.0'))
dependsOn: Windows_${{ parameters.configuration }}_Build
variables:
- name: XUNIT_LOGS
diff --git a/src/EditorFeatures/Test/StackTraceExplorer/StackTraceExplorerTests.cs b/src/EditorFeatures/Test/StackTraceExplorer/StackTraceExplorerTests.cs
index cea9e8f436176..f498aabe36b78 100644
--- a/src/EditorFeatures/Test/StackTraceExplorer/StackTraceExplorerTests.cs
+++ b/src/EditorFeatures/Test/StackTraceExplorer/StackTraceExplorerTests.cs
@@ -32,8 +32,7 @@ private static async Task TestSymbolFoundAsync(string inputLine, string code)
var reparsedResult = await StackTraceAnalyzer.AnalyzeAsync(stackFrame.ToString(), CancellationToken.None);
Assert.Single(reparsedResult.ParsedFrames);
- var reparsedFrame = reparsedResult.ParsedFrames[0] as ParsedStackFrame;
- AssertEx.NotNull(reparsedFrame);
+ var reparsedFrame = Assert.IsType<ParsedStackFrame>(reparsedResult.ParsedFrames[0]);
StackFrameUtils.AssertEqual(stackFrame.Root, reparsedFrame.Root);
// Get the definition for the parsed frame
@@ -820,9 +819,9 @@ class C
var result = await StackTraceAnalyzer.AnalyzeAsync(line, CancellationToken.None);
Assert.Equal(1, result.ParsedFrames.Length);
- var parsedFame = result.ParsedFrames.OfType().Single();
+ var parsedFrame = Assert.IsType<ParsedStackFrame>(result.ParsedFrames[0]);
var service = workspace.Services.GetRequiredService();
- var definition = await service.TryFindDefinitionAsync(workspace.CurrentSolution, parsedFame, StackFrameSymbolPart.Method, CancellationToken.None);
+ var definition = await service.TryFindDefinitionAsync(workspace.CurrentSolution, parsedFrame, StackFrameSymbolPart.Method, CancellationToken.None);
Assert.Null(definition);
}
@@ -850,13 +849,89 @@ public async Task TestMetadataSymbol()
var result = await StackTraceAnalyzer.AnalyzeAsync("at System.String.ToLower()", CancellationToken.None);
Assert.Single(result.ParsedFrames);
- var frame = result.ParsedFrames[0] as ParsedStackFrame;
- AssertEx.NotNull(frame);
-
+ var frame = Assert.IsType<ParsedStackFrame>(result.ParsedFrames[0]);
var service = workspace.Services.GetRequiredService();
var definition = await service.TryFindDefinitionAsync(workspace.CurrentSolution, frame, StackFrameSymbolPart.Method, CancellationToken.None);
AssertEx.NotNull(definition);
Assert.Equal("String.ToLower", definition.NameDisplayParts.ToVisibleDisplayString(includeLeftToRightMarker: false));
}
+
+ [Fact]
+ public async Task TestAdditionalFileExactMatchAsync()
+ {
+ using var workspace = TestWorkspace.Create(
+ """
+
+
+
+ class C
+ {
+ void M() {}
+ }
+
+
+ @page "/"
+
+ @code
+ {
+ void M()
+ {
+ }
+ }
+
+
+
+ """);
+
+ var result = await StackTraceAnalyzer.AnalyzeAsync("at Path.To.Component.M() in C:/path/to/Component.razor:line 5", CancellationToken.None);
+ Assert.Single(result.ParsedFrames);
+
+ var frame = Assert.IsType<ParsedStackFrame>(result.ParsedFrames[0]);
+ var service = workspace.Services.GetRequiredService<IStackTraceExplorerService>();
+ var (document, line) = service.GetDocumentAndLine(workspace.CurrentSolution, frame);
+ Assert.Equal(5, line);
+
+ AssertEx.NotNull(document);
+ Assert.Equal(@"C:/path/to/Component.razor", document.FilePath);
+ }
+
+ [Fact]
+ public async Task TestAdditionalFileNameMatchAsync()
+ {
+ using var workspace = TestWorkspace.Create(
+ """
+
+
+
+ class C
+ {
+ void M() {}
+ }
+
+
+ @page "/"
+
+ @code
+ {
+ void M()
+ {
+ }
+ }
+
+
+
+ """);
+
+ var result = await StackTraceAnalyzer.AnalyzeAsync("at Path.To.Component.M() in Component.razor:line 5", CancellationToken.None);
+ Assert.Single(result.ParsedFrames);
+
+ var frame = Assert.IsType<ParsedStackFrame>(result.ParsedFrames[0]);
+ var service = workspace.Services.GetRequiredService<IStackTraceExplorerService>();
+ var (document, line) = service.GetDocumentAndLine(workspace.CurrentSolution, frame);
+ Assert.Equal(5, line);
+
+ AssertEx.NotNull(document);
+ Assert.Equal(@"C:/path/to/Component.razor", document.FilePath);
+ }
}
diff --git a/src/Features/Core/Portable/StackTraceExplorer/StackTraceExplorerService.cs b/src/Features/Core/Portable/StackTraceExplorer/StackTraceExplorerService.cs
index ba9ccd34b6f6b..22c06ad2d006f 100644
--- a/src/Features/Core/Portable/StackTraceExplorer/StackTraceExplorerService.cs
+++ b/src/Features/Core/Portable/StackTraceExplorer/StackTraceExplorerService.cs
@@ -89,7 +89,7 @@ private static ImmutableArray GetFileMatches(Solution solution, St
if (documentId is not null)
{
- var document = solution.GetRequiredDocument(documentId);
+ var document = solution.GetRequiredTextDocument(documentId);
return [document];
}
@@ -105,7 +105,8 @@ private static ImmutableArray GetFileMatches(Solution solution, St
foreach (var document in allDocuments)
{
- if (string.Equals(document.Name, documentName, StringComparison.OrdinalIgnoreCase))
+ var name = Path.GetFileName(document.Name);
+ if (name.Equals(documentName, StringComparison.OrdinalIgnoreCase))
{
potentialMatches.Add(document);
}
diff --git a/src/Features/ExternalAccess/Copilot/Internal/Analyzer/CSharp/CSharpCopilotCodeAnalysisService.cs b/src/Features/ExternalAccess/Copilot/Internal/Analyzer/CSharp/CSharpCopilotCodeAnalysisService.cs
index 7b64e019e5f80..3aacb5260141a 100644
--- a/src/Features/ExternalAccess/Copilot/Internal/Analyzer/CSharp/CSharpCopilotCodeAnalysisService.cs
+++ b/src/Features/ExternalAccess/Copilot/Internal/Analyzer/CSharp/CSharpCopilotCodeAnalysisService.cs
@@ -57,6 +57,7 @@ IDiagnosticsRefresher diagnosticsRefresher
AnalysisService = externalCopilotService;
GenerateDocumentationService = externalCSharpCopilotGenerateDocumentationService;
OnTheFlyDocsService = externalCSharpOnTheFlyDocsService;
+ GenerateImplementationService = externalCSharpCopilotGenerateImplementationService;
}
protected override Task> AnalyzeDocumentCoreAsync(Document document, TextSpan? span, string promptTitle, CancellationToken cancellationToken)
diff --git a/src/Features/Lsif/Generator/Generator.cs b/src/Features/Lsif/Generator/Generator.cs
index 354e412cf2d71..f885d8521c6d4 100644
--- a/src/Features/Lsif/Generator/Generator.cs
+++ b/src/Features/Lsif/Generator/Generator.cs
@@ -466,19 +466,22 @@ private static async Task GenerateSemanticTokensAsync(
IdFactory idFactory,
LsifDocument documentVertex)
{
+ var cancellationToken = CancellationToken.None;
+
// Compute colorization data.
//
// Unlike the mainline LSP scenario, where we control both the syntactic colorizer (in-proc syntax tagger)
// and the semantic colorizer (LSP semantic tokens) LSIF is more likely to be consumed by clients which may
// have different syntactic classification behavior than us, resulting in missing colors. To avoid this, we
// include syntax tokens in the generated data.
+ var text = await document.GetTextAsync(cancellationToken);
var data = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
// Just get the pure-lsp semantic tokens here.
document,
- spans: [],
+ spans: [text.Lines.GetLinePositionSpan(new TextSpan(0, text.Length))],
supportsVisualStudioExtensions: true,
options: Classification.ClassificationOptions.Default,
- cancellationToken: CancellationToken.None);
+ cancellationToken);
var semanticTokensResult = new SemanticTokensResult(new SemanticTokens { Data = data }, idFactory);
var semanticTokensEdge = Edge.Create(Methods.TextDocumentSemanticTokensFullName, documentVertex.GetId(), semanticTokensResult.GetId(), idFactory);
diff --git a/src/LanguageServer/Protocol/DefaultCapabilitiesProvider.cs b/src/LanguageServer/Protocol/DefaultCapabilitiesProvider.cs
index 07bfdbf829b13..067785580f1e9 100644
--- a/src/LanguageServer/Protocol/DefaultCapabilitiesProvider.cs
+++ b/src/LanguageServer/Protocol/DefaultCapabilitiesProvider.cs
@@ -90,10 +90,13 @@ public ServerCapabilities GetCapabilities(ClientCapabilities clientCapabilities)
// Using only range handling has shown to be more performant than using a combination of full/edits/range
// handling, especially for larger files. With range handling, we only need to compute tokens for whatever
// is in view, while with full/edits handling we need to compute tokens for the entire file and then
- // potentially run a diff between the old and new tokens.
+ // potentially run a diff between the old and new tokens. Therefore, we only enable full handling if
+ // the client does not support ranges.
+ var rangeCapabilities = clientCapabilities.TextDocument?.SemanticTokens?.Requests?.Range;
+ var supportsSemanticTokensRange = rangeCapabilities?.Value is not (false or null);
capabilities.SemanticTokensOptions = new SemanticTokensOptions
{
- Full = false,
+ Full = !supportsSemanticTokensRange,
Range = true,
Legend = new SemanticTokensLegend
{
diff --git a/src/LanguageServer/Protocol/ExternalAccess/Razor/SemanticTokensRangesHandler.cs b/src/LanguageServer/Protocol/ExternalAccess/Razor/SemanticTokensRangesHandler.cs
index 47caa64f1e160..d329f0ebc21b0 100644
--- a/src/LanguageServer/Protocol/ExternalAccess/Razor/SemanticTokensRangesHandler.cs
+++ b/src/LanguageServer/Protocol/ExternalAccess/Razor/SemanticTokensRangesHandler.cs
@@ -12,24 +12,20 @@
namespace Microsoft.CodeAnalysis.LanguageServer.ExternalAccess.Razor;
[Method(SemanticRangesMethodName)]
-internal class SemanticTokensRangesHandler : ILspServiceDocumentRequestHandler
+internal sealed class SemanticTokensRangesHandler(
+ IGlobalOptionService globalOptions,
+ SemanticTokensRefreshQueue semanticTokensRefreshQueue)
+ : ILspServiceDocumentRequestHandler
{
public const string SemanticRangesMethodName = "roslyn/semanticTokenRanges";
- private readonly IGlobalOptionService _globalOptions;
- private readonly SemanticTokensRefreshQueue _semanticTokenRefreshQueue;
+
+ private readonly IGlobalOptionService _globalOptions = globalOptions;
+ private readonly SemanticTokensRefreshQueue _semanticTokenRefreshQueue = semanticTokensRefreshQueue;
public bool MutatesSolutionState => false;
public bool RequiresLSPSolution => true;
- public SemanticTokensRangesHandler(
- IGlobalOptionService globalOptions,
- SemanticTokensRefreshQueue semanticTokensRefreshQueue)
- {
- _globalOptions = globalOptions;
- _semanticTokenRefreshQueue = semanticTokensRefreshQueue;
- }
-
public TextDocumentIdentifier GetTextDocumentIdentifier(SemanticTokensRangesParams request)
{
Contract.ThrowIfNull(request.TextDocument);
@@ -37,12 +33,14 @@ public TextDocumentIdentifier GetTextDocumentIdentifier(SemanticTokensRangesPara
}
public async Task HandleRequestAsync(
- SemanticTokensRangesParams request,
- RequestContext context,
- CancellationToken cancellationToken)
+ SemanticTokensRangesParams request,
+ RequestContext context,
+ CancellationToken cancellationToken)
{
Contract.ThrowIfNull(request.TextDocument, "TextDocument is null.");
- var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync(_globalOptions, _semanticTokenRefreshQueue, request.Ranges, context, cancellationToken).ConfigureAwait(false);
+
+ var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync(
+ _globalOptions, _semanticTokenRefreshQueue, request.Ranges, context, cancellationToken).ConfigureAwait(false);
return new SemanticTokens { Data = tokensData };
}
}
diff --git a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensFullHandler.cs b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensFullHandler.cs
new file mode 100644
index 0000000000000..35c8400eec316
--- /dev/null
+++ b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensFullHandler.cs
@@ -0,0 +1,43 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.CodeAnalysis.Options;
+using Roslyn.LanguageServer.Protocol;
+using LSP = Roslyn.LanguageServer.Protocol;
+
+namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens;
+
+[Method(Methods.TextDocumentSemanticTokensFullName)]
+internal sealed class SemanticTokensFullHandler(
+ IGlobalOptionService globalOptions,
+ SemanticTokensRefreshQueue semanticTokensRefreshQueue)
+ : ILspServiceDocumentRequestHandler<LSP.SemanticTokensFullParams, LSP.SemanticTokens>
+{
+ private readonly IGlobalOptionService _globalOptions = globalOptions;
+ private readonly SemanticTokensRefreshQueue _semanticTokenRefreshQueue = semanticTokensRefreshQueue;
+
+ public bool MutatesSolutionState => false;
+ public bool RequiresLSPSolution => true;
+
+ public TextDocumentIdentifier GetTextDocumentIdentifier(LSP.SemanticTokensFullParams request)
+ {
+ Contract.ThrowIfNull(request.TextDocument);
+ return request.TextDocument;
+ }
+
+ public async Task<LSP.SemanticTokens> HandleRequestAsync(
+ SemanticTokensFullParams request,
+ RequestContext context,
+ CancellationToken cancellationToken)
+ {
+ Contract.ThrowIfNull(request.TextDocument);
+
+ // Passing a null array of ranges will cause the helper to return tokens for the entire document.
+ var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync(
+ _globalOptions, _semanticTokenRefreshQueue, ranges: null, context, cancellationToken).ConfigureAwait(false);
+ return new LSP.SemanticTokens { Data = tokensData };
+ }
+}
diff --git a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensFullHandlerFactory.cs b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensFullHandlerFactory.cs
new file mode 100644
index 0000000000000..0cdf2dfd8e495
--- /dev/null
+++ b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensFullHandlerFactory.cs
@@ -0,0 +1,24 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Composition;
+using Microsoft.CodeAnalysis.Host.Mef;
+using Microsoft.CodeAnalysis.Options;
+
+namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens;
+
+[ExportCSharpVisualBasicLspServiceFactory(typeof(SemanticTokensFullHandler)), Shared]
+[method: ImportingConstructor]
+[method: Obsolete(MefConstruction.ImportingConstructorMessage, error: true)]
+internal sealed class SemanticTokensFullHandlerFactory(IGlobalOptionService globalOptions) : ILspServiceFactory
+{
+ private readonly IGlobalOptionService _globalOptions = globalOptions;
+
+ public ILspService CreateILspService(LspServices lspServices, WellKnownLspServerKinds serverKind)
+ {
+ var semanticTokensRefreshQueue = lspServices.GetRequiredService<SemanticTokensRefreshQueue>();
+ return new SemanticTokensFullHandler(_globalOptions, semanticTokensRefreshQueue);
+ }
+}
diff --git a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensHelpers.cs b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensHelpers.cs
index 949eb42c53173..8b290487620c1 100644
--- a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensHelpers.cs
+++ b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensHelpers.cs
@@ -17,372 +17,359 @@
using Microsoft.CodeAnalysis.Text;
using LSP = Roslyn.LanguageServer.Protocol;
-namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens
+namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens;
+
+internal static class SemanticTokensHelpers
{
- internal static class SemanticTokensHelpers
+ private static readonly ObjectPool<List<int>> s_tokenListPool = new(() => new List<int>(capacity: 1000));
+
+ /// The ranges to get semantic tokens for. If null then the entire document will be
+ /// processed.
+ internal static async Task HandleRequestHelperAsync(
+ IGlobalOptionService globalOptions,
+ SemanticTokensRefreshQueue semanticTokensRefreshQueue,
+ LSP.Range[]? ranges,
+ RequestContext context,
+ CancellationToken cancellationToken)
{
- private static readonly ObjectPool> s_tokenListPool = new ObjectPool>(() => new List(capacity: 1000));
-
- internal static async Task HandleRequestHelperAsync(
- IGlobalOptionService globalOptions,
- SemanticTokensRefreshQueue semanticTokensRefreshQueue,
- LSP.Range[] ranges,
- RequestContext context,
- CancellationToken cancellationToken)
- {
- if (ranges.Length == 0)
- {
- return [];
- }
+ var contextDocument = context.GetRequiredDocument();
- var contextDocument = context.GetRequiredDocument();
- var project = contextDocument.Project;
- var options = globalOptions.GetClassificationOptions(project.Language);
- var supportsVisualStudioExtensions = context.GetRequiredClientCapabilities().HasVisualStudioLspCapability();
+ // If the client didn't provide any ranges, we'll just return the entire document.
+ var text = await contextDocument.GetTextAsync(cancellationToken).ConfigureAwait(false);
+ ranges ??= [ProtocolConversions.TextSpanToRange(new TextSpan(0, text.Length), text)];
- var spans = new FixedSizeArrayBuilder(ranges.Length);
- foreach (var range in ranges)
- spans.Add(ProtocolConversions.RangeToLinePositionSpan(range));
+ var project = contextDocument.Project;
+ var options = globalOptions.GetClassificationOptions(project.Language);
+ var supportsVisualStudioExtensions = context.GetRequiredClientCapabilities().HasVisualStudioLspCapability();
- var tokensData = await HandleRequestHelperAsync(contextDocument, spans.MoveToImmutable(), supportsVisualStudioExtensions, options, cancellationToken).ConfigureAwait(false);
+ var spans = new FixedSizeArrayBuilder<LinePositionSpan>(ranges.Length);
+ foreach (var range in ranges)
+ spans.Add(ProtocolConversions.RangeToLinePositionSpan(range));
- // The above call to get semantic tokens may be inaccurate (because we use frozen partial semantics). Kick
- // off a request to ensure that the OOP side gets a fully up to compilation for this project. Once it does
- // we can optionally choose to notify our caller to do a refresh if we computed a compilation for a new
- // solution snapshot.
- await semanticTokensRefreshQueue.TryEnqueueRefreshComputationAsync(project, cancellationToken).ConfigureAwait(false);
- return tokensData;
- }
+ var tokensData = await HandleRequestHelperAsync(contextDocument, spans.MoveToImmutable(), supportsVisualStudioExtensions, options, cancellationToken).ConfigureAwait(false);
- public static async Task HandleRequestHelperAsync(Document document, ImmutableArray spans, bool supportsVisualStudioExtensions, ClassificationOptions options, CancellationToken cancellationToken)
- {
- // If the full compilation is not yet available, we'll try getting a partial one. It may contain inaccurate
- // results but will speed up how quickly we can respond to the client's request.
- document = document.WithFrozenPartialSemantics(cancellationToken);
- options = options with { FrozenPartialSemantics = true };
-
- // The results from the range handler should not be cached since we don't want to cache
- // partial token results. In addition, a range request is only ever called with a whole
- // document request, so caching range results is unnecessary since the whole document
- // handler will cache the results anyway.
- return await ComputeSemanticTokensDataAsync(
- document,
- spans,
- supportsVisualStudioExtensions,
- options,
- cancellationToken).ConfigureAwait(false);
- }
-
- ///
- /// Returns the semantic tokens data for a given document with an optional ranges.
- ///
- /// Spans to compute tokens for. If empty, the whole document will be used.
- public static async Task ComputeSemanticTokensDataAsync(
- Document document,
- ImmutableArray spans,
- bool supportsVisualStudioExtensions,
- ClassificationOptions options,
- CancellationToken cancellationToken)
- {
- var tokenTypesToIndex = SemanticTokensSchema.GetSchema(supportsVisualStudioExtensions).TokenTypeToIndex;
- var root = await document.GetRequiredSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
- var text = await document.GetValueTextAsync(cancellationToken).ConfigureAwait(false);
- using var _1 = Classifier.GetPooledList(out var classifiedSpans);
- using var _2 = Classifier.GetPooledList(out var updatedClassifiedSpans);
-
- // We either calculate the tokens for the full document span, or the user
- // can pass in a range from the full document if they wish.
- ImmutableArray textSpans;
- if (spans.Length == 0)
- {
- textSpans = [root.FullSpan];
- }
- else
- {
- var textSpansBuilder = new FixedSizeArrayBuilder(spans.Length);
- foreach (var span in spans)
- textSpansBuilder.Add(text.Lines.GetTextSpan(span));
+ // The above call to get semantic tokens may be inaccurate (because we use frozen partial semantics). Kick
+ // off a request to ensure that the OOP side gets a fully up to date compilation for this project. Once it does
+ // we can optionally choose to notify our caller to do a refresh if we computed a compilation for a new
+ // solution snapshot.
+ await semanticTokensRefreshQueue.TryEnqueueRefreshComputationAsync(project, cancellationToken).ConfigureAwait(false);
+ return tokensData;
+ }
- textSpans = textSpansBuilder.MoveToImmutable();
- }
+ public static async Task HandleRequestHelperAsync(
+ Document document, ImmutableArray spans, bool supportsVisualStudioExtensions, ClassificationOptions options, CancellationToken cancellationToken)
+ {
+ // If the full compilation is not yet available, we'll try getting a partial one. It may contain inaccurate
+ // results but will speed up how quickly we can respond to the client's request.
+ document = document.WithFrozenPartialSemantics(cancellationToken);
+ options = options with { FrozenPartialSemantics = true };
+
+ // The results from the range handler should not be cached since we don't want to cache
+ // partial token results. In addition, a range request is only ever called with a whole
+ // document request, so caching range results is unnecessary since the whole document
+ // handler will cache the results anyway.
+ return await ComputeSemanticTokensDataAsync(
+ document,
+ spans,
+ supportsVisualStudioExtensions,
+ options,
+ cancellationToken).ConfigureAwait(false);
+ }
- await GetClassifiedSpansForDocumentAsync(
- classifiedSpans, document, textSpans, options, cancellationToken).ConfigureAwait(false);
+ ///
+ /// Returns the semantic tokens data for a given document with an optional ranges.
+ ///
+ /// Spans to compute tokens for.
+ public static async Task ComputeSemanticTokensDataAsync(
+ Document document,
+ ImmutableArray spans,
+ bool supportsVisualStudioExtensions,
+ ClassificationOptions options,
+ CancellationToken cancellationToken)
+ {
+ var tokenTypesToIndex = SemanticTokensSchema.GetSchema(supportsVisualStudioExtensions).TokenTypeToIndex;
+ var root = await document.GetRequiredSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
+ var text = await document.GetValueTextAsync(cancellationToken).ConfigureAwait(false);
+ using var _1 = Classifier.GetPooledList(out var classifiedSpans);
+ using var _2 = Classifier.GetPooledList(out var updatedClassifiedSpans);
+
+ var textSpans = spans.SelectAsArray(static (span, text) => text.Lines.GetTextSpan(span), text);
+ await GetClassifiedSpansForDocumentAsync(
+ classifiedSpans, document, textSpans, options, cancellationToken).ConfigureAwait(false);
+
+ // Classified spans are not guaranteed to be returned in a certain order so we sort them to be safe.
+ classifiedSpans.Sort(ClassifiedSpanComparer.Instance);
+
+ // Multi-line tokens are not supported by VS (tracked by https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1265495).
+ // Roslyn's classifier however can return multi-line classified spans, so we must break these up into single-line spans.
+ ConvertMultiLineToSingleLineSpans(text, classifiedSpans, updatedClassifiedSpans);
+
+ // TO-DO: We should implement support for streaming if LSP adds support for it:
+ // https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1276300
+ return ComputeTokens(text.Lines, updatedClassifiedSpans, supportsVisualStudioExtensions, tokenTypesToIndex);
+ }
- // Classified spans are not guaranteed to be returned in a certain order so we sort them to be safe.
- classifiedSpans.Sort(ClassifiedSpanComparer.Instance);
+ private static async Task GetClassifiedSpansForDocumentAsync(
+ SegmentedList classifiedSpans,
+ Document document,
+ ImmutableArray textSpans,
+ ClassificationOptions options,
+ CancellationToken cancellationToken)
+ {
+ var classificationService = document.GetRequiredLanguageService();
- // Multi-line tokens are not supported by VS (tracked by https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1265495).
- // Roslyn's classifier however can return multi-line classified spans, so we must break these up into single-line spans.
- ConvertMultiLineToSingleLineSpans(text, classifiedSpans, updatedClassifiedSpans);
+ // We always return both syntactic and semantic classifications. If there is a syntactic classifier running on the client
+ // then the semantic token classifications will override them.
- // TO-DO: We should implement support for streaming if LSP adds support for it:
- // https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1276300
- return ComputeTokens(text.Lines, updatedClassifiedSpans, supportsVisualStudioExtensions, tokenTypesToIndex);
- }
+ // `includeAdditiveSpans` will add token modifiers such as 'static', which we want to include in LSP.
+ var spans = await ClassifierHelper.GetClassifiedSpansAsync(
+ document, textSpans, options, includeAdditiveSpans: true, cancellationToken).ConfigureAwait(false);
- private static async Task GetClassifiedSpansForDocumentAsync(
- SegmentedList classifiedSpans,
- Document document,
- ImmutableArray textSpans,
- ClassificationOptions options,
- CancellationToken cancellationToken)
- {
- var classificationService = document.GetRequiredLanguageService();
+ // The spans returned to us may include some empty spans, which we don't care about. We also don't care
+ // about the 'text' classification. It's added for everything between real classifications (including
+ // whitespace), and just means 'don't classify this'. No need for us to actually include that in
+ // semantic tokens as it just wastes space in the result.
+ var nonEmptySpans = spans.Where(s => !s.TextSpan.IsEmpty && s.ClassificationType != ClassificationTypeNames.Text);
+ classifiedSpans.AddRange(nonEmptySpans);
+ }
- // We always return both syntactic and semantic classifications. If there is a syntactic classifier running on the client
- // then the semantic token classifications will override them.
+ private static void ConvertMultiLineToSingleLineSpans(SourceText text, SegmentedList classifiedSpans, SegmentedList updatedClassifiedSpans)
+ {
- // `includeAdditiveSpans` will add token modifiers such as 'static', which we want to include in LSP.
- var spans = await ClassifierHelper.GetClassifiedSpansAsync(
- document, textSpans, options, includeAdditiveSpans: true, cancellationToken).ConfigureAwait(false);
+ for (var spanIndex = 0; spanIndex < classifiedSpans.Count; spanIndex++)
+ {
+ var span = classifiedSpans[spanIndex];
+ text.GetLinesAndOffsets(span.TextSpan, out var startLine, out var startOffset, out var endLine, out var endOffSet);
- // The spans returned to us may include some empty spans, which we don't care about. We also don't care
- // about the 'text' classification. It's added for everything between real classifications (including
- // whitespace), and just means 'don't classify this'. No need for us to actually include that in
- // semantic tokens as it just wastes space in the result.
- var nonEmptySpans = spans.Where(s => !s.TextSpan.IsEmpty && s.ClassificationType != ClassificationTypeNames.Text);
- classifiedSpans.AddRange(nonEmptySpans);
+ // If the start and end of the classified span are not on the same line, we're dealing with a multi-line span.
+ // Since VS doesn't support multi-line spans/tokens, we need to break the span up into single-line spans.
+ if (startLine != endLine)
+ {
+ ConvertToSingleLineSpan(
+ text, classifiedSpans, updatedClassifiedSpans, ref spanIndex, span.ClassificationType,
+ startLine, startOffset, endLine, endOffSet);
+ }
+ else
+ {
+ // This is already a single-line span, so no modification is necessary.
+ updatedClassifiedSpans.Add(span);
+ }
}
- private static void ConvertMultiLineToSingleLineSpans(SourceText text, SegmentedList classifiedSpans, SegmentedList updatedClassifiedSpans)
+ static void ConvertToSingleLineSpan(
+ SourceText text,
+ SegmentedList originalClassifiedSpans,
+ SegmentedList updatedClassifiedSpans,
+ ref int spanIndex,
+ string classificationType,
+ int startLine,
+ int startOffset,
+ int endLine,
+ int endOffSet)
{
+ var numLinesInSpan = endLine - startLine + 1;
+ Contract.ThrowIfTrue(numLinesInSpan < 1);
- for (var spanIndex = 0; spanIndex < classifiedSpans.Count; spanIndex++)
+ for (var currentLine = 0; currentLine < numLinesInSpan; currentLine++)
{
- var span = classifiedSpans[spanIndex];
- text.GetLinesAndOffsets(span.TextSpan, out var startLine, out var startOffset, out var endLine, out var endOffSet);
+ TextSpan textSpan;
+ var line = text.Lines[startLine + currentLine];
- // If the start and end of the classified span are not on the same line, we're dealing with a multi-line span.
- // Since VS doesn't support multi-line spans/tokens, we need to break the span up into single-line spans.
- if (startLine != endLine)
+ // Case 1: First line of span
+ if (currentLine == 0)
+ {
+ var absoluteStart = line.Start + startOffset;
+
+ // This start could be past the regular end of the line if it's within the newline character if we have a CRLF newline. In that case, just skip emitting a span for the LF.
+ // One example where this could happen is an embedded regular expression that we're classifying; regular expression comments contained within a multi-line string
+ // contain the carriage return but not the linefeed, so the linefeed could be the start of the next classification.
+ textSpan = TextSpan.FromBounds(Math.Min(absoluteStart, line.End), line.End);
+ }
+ // Case 2: Any of the span's middle lines
+ else if (currentLine != numLinesInSpan - 1)
{
- ConvertToSingleLineSpan(
- text, classifiedSpans, updatedClassifiedSpans, ref spanIndex, span.ClassificationType,
- startLine, startOffset, endLine, endOffSet);
+ textSpan = line.Span;
}
+ // Case 3: Last line of span
else
{
- // This is already a single-line span, so no modification is necessary.
- updatedClassifiedSpans.Add(span);
+ textSpan = new TextSpan(line.Start, endOffSet);
}
- }
- static void ConvertToSingleLineSpan(
- SourceText text,
- SegmentedList originalClassifiedSpans,
- SegmentedList updatedClassifiedSpans,
- ref int spanIndex,
- string classificationType,
- int startLine,
- int startOffset,
- int endLine,
- int endOffSet)
- {
- var numLinesInSpan = endLine - startLine + 1;
- Contract.ThrowIfTrue(numLinesInSpan < 1);
+ // Omit 0-length spans created in this fashion.
+ if (textSpan.Length > 0)
+ {
+ var updatedClassifiedSpan = new ClassifiedSpan(textSpan, classificationType);
+ updatedClassifiedSpans.Add(updatedClassifiedSpan);
+ }
- for (var currentLine = 0; currentLine < numLinesInSpan; currentLine++)
+ // Since spans are expected to be ordered, when breaking up a multi-line span, we may have to insert
+ // other spans in-between. For example, we may encounter this case when breaking up a multi-line verbatim
+ // string literal containing escape characters:
+ // var x = @"one ""
+ // two";
+ // The check below ensures we return the spans in the correct order, i.e. 'one', '""', 'two'.
+ while (spanIndex + 1 < originalClassifiedSpans.Count &&
+ textSpan.Contains(originalClassifiedSpans[spanIndex + 1].TextSpan))
{
- TextSpan textSpan;
- var line = text.Lines[startLine + currentLine];
-
- // Case 1: First line of span
- if (currentLine == 0)
- {
- var absoluteStart = line.Start + startOffset;
-
- // This start could be past the regular end of the line if it's within the newline character if we have a CRLF newline. In that case, just skip emitting a span for the LF.
- // One example where this could happen is an embedded regular expression that we're classifying; regular expression comments contained within a multi-line string
- // contain the carriage return but not the linefeed, so the linefeed could be the start of the next classification.
- textSpan = TextSpan.FromBounds(Math.Min(absoluteStart, line.End), line.End);
- }
- // Case 2: Any of the span's middle lines
- else if (currentLine != numLinesInSpan - 1)
- {
- textSpan = line.Span;
- }
- // Case 3: Last line of span
- else
- {
- textSpan = new TextSpan(line.Start, endOffSet);
- }
-
- // Omit 0-length spans created in this fashion.
- if (textSpan.Length > 0)
- {
- var updatedClassifiedSpan = new ClassifiedSpan(textSpan, classificationType);
- updatedClassifiedSpans.Add(updatedClassifiedSpan);
- }
-
- // Since spans are expected to be ordered, when breaking up a multi-line span, we may have to insert
- // other spans in-between. For example, we may encounter this case when breaking up a multi-line verbatim
- // string literal containing escape characters:
- // var x = @"one ""
- // two";
- // The check below ensures we correctly return the spans in the correct order, i.e. 'one', '""', 'two'.
- while (spanIndex + 1 < originalClassifiedSpans.Count &&
- textSpan.Contains(originalClassifiedSpans[spanIndex + 1].TextSpan))
- {
- updatedClassifiedSpans.Add(originalClassifiedSpans[spanIndex + 1]);
- spanIndex++;
- }
+ updatedClassifiedSpans.Add(originalClassifiedSpans[spanIndex + 1]);
+ spanIndex++;
}
}
}
+ }
- private static int[] ComputeTokens(
- TextLineCollection lines,
- SegmentedList classifiedSpans,
- bool supportsVisualStudioExtensions,
- IReadOnlyDictionary tokenTypesToIndex)
- {
- // We keep track of the last line number and last start character since tokens are
- // reported relative to each other.
- var lastLineNumber = 0;
- var lastStartCharacter = 0;
-
- var tokenTypeMap = SemanticTokensSchema.GetSchema(supportsVisualStudioExtensions).TokenTypeMap;
+ private static int[] ComputeTokens(
+ TextLineCollection lines,
+ SegmentedList classifiedSpans,
+ bool supportsVisualStudioExtensions,
+ IReadOnlyDictionary tokenTypesToIndex)
+ {
+ // We keep track of the last line number and last start character since tokens are
+ // reported relative to each other.
+ var lastLineNumber = 0;
+ var lastStartCharacter = 0;
- using var pooledData = s_tokenListPool.GetPooledObject();
- var data = pooledData.Object;
+ var tokenTypeMap = SemanticTokensSchema.GetSchema(supportsVisualStudioExtensions).TokenTypeMap;
- // Items in the pool may not have been cleared
- data.Clear();
+ using var pooledData = s_tokenListPool.GetPooledObject();
+ var data = pooledData.Object;
- for (var currentClassifiedSpanIndex = 0; currentClassifiedSpanIndex < classifiedSpans.Count; currentClassifiedSpanIndex++)
- {
- currentClassifiedSpanIndex = ComputeNextToken(
- lines, ref lastLineNumber, ref lastStartCharacter, classifiedSpans,
- currentClassifiedSpanIndex, tokenTypeMap, tokenTypesToIndex,
- out var deltaLine, out var startCharacterDelta, out var tokenLength,
- out var tokenType, out var tokenModifiers);
-
- data.Add(deltaLine);
- data.Add(startCharacterDelta);
- data.Add(tokenLength);
- data.Add(tokenType);
- data.Add(tokenModifiers);
- }
+ // Items in the pool may not have been cleared
+ data.Clear();
- return [.. data];
+ for (var currentClassifiedSpanIndex = 0; currentClassifiedSpanIndex < classifiedSpans.Count; currentClassifiedSpanIndex++)
+ {
+ currentClassifiedSpanIndex = ComputeNextToken(
+ lines, ref lastLineNumber, ref lastStartCharacter, classifiedSpans,
+ currentClassifiedSpanIndex, tokenTypeMap, tokenTypesToIndex,
+ out var deltaLine, out var startCharacterDelta, out var tokenLength,
+ out var tokenType, out var tokenModifiers);
+
+ data.Add(deltaLine);
+ data.Add(startCharacterDelta);
+ data.Add(tokenLength);
+ data.Add(tokenType);
+ data.Add(tokenModifiers);
}
- private static int ComputeNextToken(
- TextLineCollection lines,
- ref int lastLineNumber,
- ref int lastStartCharacter,
- SegmentedList classifiedSpans,
- int currentClassifiedSpanIndex,
- IReadOnlyDictionary tokenTypeMap,
- IReadOnlyDictionary tokenTypesToIndex,
- out int deltaLineOut,
- out int startCharacterDeltaOut,
- out int tokenLengthOut,
- out int tokenTypeOut,
- out int tokenModifiersOut)
+ return [.. data];
+ }
+
+ private static int ComputeNextToken(
+ TextLineCollection lines,
+ ref int lastLineNumber,
+ ref int lastStartCharacter,
+ SegmentedList classifiedSpans,
+ int currentClassifiedSpanIndex,
+ IReadOnlyDictionary tokenTypeMap,
+ IReadOnlyDictionary tokenTypesToIndex,
+ out int deltaLineOut,
+ out int startCharacterDeltaOut,
+ out int tokenLengthOut,
+ out int tokenTypeOut,
+ out int tokenModifiersOut)
+ {
+ // Each semantic token is represented in LSP by five numbers:
+ // 1. Token line number delta, relative to the previous token
+ // 2. Token start character delta, relative to the previous token
+ // 3. Token length
+ // 4. Token type (index) - looked up in SemanticTokensLegend.tokenTypes
+ // 5. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers
+
+ var classifiedSpan = classifiedSpans[currentClassifiedSpanIndex];
+ var originalTextSpan = classifiedSpan.TextSpan;
+ var linePosition = lines.GetLinePositionSpan(originalTextSpan).Start;
+ var lineNumber = linePosition.Line;
+
+ // 1. Token line number delta, relative to the previous token
+ var deltaLine = lineNumber - lastLineNumber;
+ Contract.ThrowIfTrue(deltaLine < 0, $"deltaLine is less than 0: {deltaLine}");
+
+ // 2. Token start character delta, relative to the previous token
+ // (Relative to 0 or the previous token’s start if they're on the same line)
+ var deltaStartCharacter = linePosition.Character;
+ if (lastLineNumber == lineNumber)
{
- // Each semantic token is represented in LSP by five numbers:
- // 1. Token line number delta, relative to the previous token
- // 2. Token start character delta, relative to the previous token
- // 3. Token length
- // 4. Token type (index) - looked up in SemanticTokensLegend.tokenTypes
- // 5. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers
-
- var classifiedSpan = classifiedSpans[currentClassifiedSpanIndex];
- var originalTextSpan = classifiedSpan.TextSpan;
- var linePosition = lines.GetLinePositionSpan(originalTextSpan).Start;
- var lineNumber = linePosition.Line;
-
- // 1. Token line number delta, relative to the previous token
- var deltaLine = lineNumber - lastLineNumber;
- Contract.ThrowIfTrue(deltaLine < 0, $"deltaLine is less than 0: {deltaLine}");
-
- // 2. Token start character delta, relative to the previous token
- // (Relative to 0 or the previous token’s start if they're on the same line)
- var deltaStartCharacter = linePosition.Character;
- if (lastLineNumber == lineNumber)
- {
- deltaStartCharacter -= lastStartCharacter;
- }
+ deltaStartCharacter -= lastStartCharacter;
+ }
- lastLineNumber = lineNumber;
- lastStartCharacter = linePosition.Character;
+ lastLineNumber = lineNumber;
+ lastStartCharacter = linePosition.Character;
- // 3. Token length
- var tokenLength = originalTextSpan.Length;
- Contract.ThrowIfFalse(tokenLength > 0);
+ // 3. Token length
+ var tokenLength = originalTextSpan.Length;
+ Contract.ThrowIfFalse(tokenLength > 0);
- // We currently only have one modifier (static). The logic below will need to change in the future if other
- // modifiers are added in the future.
- var modifierBits = TokenModifiers.None;
- var tokenTypeIndex = 0;
+ // We currently support a small set of modifiers (static, reassigned variable, deprecated). The
+ // logic below will need to change if other modifiers are added.
+ var modifierBits = TokenModifiers.None;
+ var tokenTypeIndex = 0;
- // Classified spans with the same text span should be combined into one token.
- while (classifiedSpans[currentClassifiedSpanIndex].TextSpan == originalTextSpan)
+ // Classified spans with the same text span should be combined into one token.
+ while (classifiedSpans[currentClassifiedSpanIndex].TextSpan == originalTextSpan)
+ {
+ var classificationType = classifiedSpans[currentClassifiedSpanIndex].ClassificationType;
+ if (classificationType == ClassificationTypeNames.StaticSymbol)
{
- var classificationType = classifiedSpans[currentClassifiedSpanIndex].ClassificationType;
- if (classificationType == ClassificationTypeNames.StaticSymbol)
- {
- // 4. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers
- modifierBits |= TokenModifiers.Static;
- }
- else if (classificationType == ClassificationTypeNames.ReassignedVariable)
- {
- // 5. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers
- modifierBits |= TokenModifiers.ReassignedVariable;
- }
- else if (classificationType == ClassificationTypeNames.ObsoleteSymbol)
- {
- // 6. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers
- modifierBits |= TokenModifiers.Deprecated;
- }
- else if (classificationType == ClassificationTypeNames.TestCode)
- {
- // Skip additive types that are not being converted to token modifiers.
- }
- else
- {
- // 7. Token type - looked up in SemanticTokensLegend.tokenTypes (language server defined mapping
- // from integer to LSP token types).
- tokenTypeIndex = GetTokenTypeIndex(classificationType);
- }
-
- // Break out of the loop if we have no more classified spans left, or if the next classified span has
- // a different text span than our current text span.
- if (currentClassifiedSpanIndex + 1 >= classifiedSpans.Count || classifiedSpans[currentClassifiedSpanIndex + 1].TextSpan != originalTextSpan)
- {
- break;
- }
+ // 5. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers
+ modifierBits |= TokenModifiers.Static;
+ }
+ else if (classificationType == ClassificationTypeNames.ReassignedVariable)
+ {
+ // 5. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers
+ modifierBits |= TokenModifiers.ReassignedVariable;
+ }
+ else if (classificationType == ClassificationTypeNames.ObsoleteSymbol)
+ {
+ // 5. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers
+ modifierBits |= TokenModifiers.Deprecated;
+ }
+ else if (classificationType == ClassificationTypeNames.TestCode)
+ {
+ // Skip additive types that are not being converted to token modifiers.
+ }
+ else
+ {
+ // 4. Token type - looked up in SemanticTokensLegend.tokenTypes (language server defined mapping
+ // from integer to LSP token types).
+ tokenTypeIndex = GetTokenTypeIndex(classificationType);
+ }
- currentClassifiedSpanIndex++;
+ // Break out of the loop if we have no more classified spans left, or if the next classified span has
+ // a different text span than our current text span.
+ if (currentClassifiedSpanIndex + 1 >= classifiedSpans.Count || classifiedSpans[currentClassifiedSpanIndex + 1].TextSpan != originalTextSpan)
+ {
+ break;
}
- deltaLineOut = deltaLine;
- startCharacterDeltaOut = deltaStartCharacter;
- tokenLengthOut = tokenLength;
- tokenTypeOut = tokenTypeIndex;
- tokenModifiersOut = (int)modifierBits;
+ currentClassifiedSpanIndex++;
+ }
- return currentClassifiedSpanIndex;
+ deltaLineOut = deltaLine;
+ startCharacterDeltaOut = deltaStartCharacter;
+ tokenLengthOut = tokenLength;
+ tokenTypeOut = tokenTypeIndex;
+ tokenModifiersOut = (int)modifierBits;
- int GetTokenTypeIndex(string classificationType)
- {
- if (!tokenTypeMap.TryGetValue(classificationType, out var tokenTypeStr))
- {
- tokenTypeStr = classificationType;
- }
+ return currentClassifiedSpanIndex;
- Contract.ThrowIfFalse(tokenTypesToIndex.TryGetValue(tokenTypeStr, out var tokenTypeIndex), "No matching token type index found.");
- return tokenTypeIndex;
+ int GetTokenTypeIndex(string classificationType)
+ {
+ if (!tokenTypeMap.TryGetValue(classificationType, out var tokenTypeStr))
+ {
+ tokenTypeStr = classificationType;
}
+
+ Contract.ThrowIfFalse(tokenTypesToIndex.TryGetValue(tokenTypeStr, out var tokenTypeIndex), "No matching token type index found.");
+ return tokenTypeIndex;
}
+ }
- private class ClassifiedSpanComparer : IComparer
- {
- public static readonly ClassifiedSpanComparer Instance = new();
+ private class ClassifiedSpanComparer : IComparer
+ {
+ public static readonly ClassifiedSpanComparer Instance = new();
- public int Compare(ClassifiedSpan x, ClassifiedSpan y) => x.TextSpan.CompareTo(y.TextSpan);
- }
+ public int Compare(ClassifiedSpan x, ClassifiedSpan y) => x.TextSpan.CompareTo(y.TextSpan);
}
}
diff --git a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensRangeHandler.cs b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensRangeHandler.cs
index 9173376a56c1b..4aa2df67382c2 100644
--- a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensRangeHandler.cs
+++ b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensRangeHandler.cs
@@ -8,40 +8,34 @@
using Roslyn.LanguageServer.Protocol;
using LSP = Roslyn.LanguageServer.Protocol;
-namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens
+namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens;
+
+[Method(Methods.TextDocumentSemanticTokensRangeName)]
+internal sealed class SemanticTokensRangeHandler(
+ IGlobalOptionService globalOptions,
+ SemanticTokensRefreshQueue semanticTokensRefreshQueue) : ILspServiceDocumentRequestHandler
{
- [Method(Methods.TextDocumentSemanticTokensRangeName)]
- internal class SemanticTokensRangeHandler : ILspServiceDocumentRequestHandler
- {
- private readonly IGlobalOptionService _globalOptions;
- private readonly SemanticTokensRefreshQueue _semanticTokenRefreshQueue;
+ private readonly IGlobalOptionService _globalOptions = globalOptions;
+ private readonly SemanticTokensRefreshQueue _semanticTokenRefreshQueue = semanticTokensRefreshQueue;
- public bool MutatesSolutionState => false;
- public bool RequiresLSPSolution => true;
+ public bool MutatesSolutionState => false;
+ public bool RequiresLSPSolution => true;
- public SemanticTokensRangeHandler(
- IGlobalOptionService globalOptions,
- SemanticTokensRefreshQueue semanticTokensRefreshQueue)
- {
- _globalOptions = globalOptions;
- _semanticTokenRefreshQueue = semanticTokensRefreshQueue;
- }
+ public TextDocumentIdentifier GetTextDocumentIdentifier(SemanticTokensRangeParams request)
+ {
+ Contract.ThrowIfNull(request.TextDocument);
+ return request.TextDocument;
+ }
- public TextDocumentIdentifier GetTextDocumentIdentifier(LSP.SemanticTokensRangeParams request)
- {
- Contract.ThrowIfNull(request.TextDocument);
- return request.TextDocument;
- }
+ public async Task HandleRequestAsync(
+ SemanticTokensRangeParams request,
+ RequestContext context,
+ CancellationToken cancellationToken)
+ {
+ Contract.ThrowIfNull(request.TextDocument, "TextDocument is null.");
- public async Task HandleRequestAsync(
- SemanticTokensRangeParams request,
- RequestContext context,
- CancellationToken cancellationToken)
- {
- Contract.ThrowIfNull(request.TextDocument, "TextDocument is null.");
- var ranges = new[] { request.Range };
- var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync(_globalOptions, _semanticTokenRefreshQueue, ranges, context, cancellationToken).ConfigureAwait(false);
- return new LSP.SemanticTokens { Data = tokensData };
- }
+ var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync(
+ _globalOptions, _semanticTokenRefreshQueue, [request.Range], context, cancellationToken).ConfigureAwait(false);
+ return new LSP.SemanticTokens { Data = tokensData };
}
}
diff --git a/src/LanguageServer/Protocol/Protocol/SemanticTokens/SemanticTokensFullParams.cs b/src/LanguageServer/Protocol/Protocol/SemanticTokens/SemanticTokensFullParams.cs
new file mode 100644
index 0000000000000..ab4a5cb8300d7
--- /dev/null
+++ b/src/LanguageServer/Protocol/Protocol/SemanticTokens/SemanticTokensFullParams.cs
@@ -0,0 +1,34 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+using System;
+using System.Text.Json.Serialization;
+
+namespace Roslyn.LanguageServer.Protocol;
+
+///
+/// Parameters for 'textDocument/semanticTokens/full' request.
+///
+/// See the Language Server Protocol specification for additional information.
+///
+///
+/// Since LSP 3.16
+internal sealed class SemanticTokensFullParams : ITextDocumentParams, IWorkDoneProgressParams, IPartialResultParams
+{
+ ///
+ /// Gets or sets an identifier for the document to fetch semantic tokens from.
+ ///
+ [JsonPropertyName("textDocument")]
+ [JsonRequired]
+ public TextDocumentIdentifier TextDocument { get; set; }
+
+ ///
+ [JsonPropertyName(Methods.WorkDoneTokenName)]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public IProgress? WorkDoneToken { get; set; }
+
+ ///
+ [JsonPropertyName(Methods.PartialResultTokenName)]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public IProgress? PartialResultToken { get; set; }
+}
diff --git a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/AbstractSemanticTokensTests.cs b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/AbstractSemanticTokensTests.cs
index 007d6d4fbcfac..ef45a583c019b 100644
--- a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/AbstractSemanticTokensTests.cs
+++ b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/AbstractSemanticTokensTests.cs
@@ -27,6 +27,14 @@ protected AbstractSemanticTokensTests(ITestOutputHelper testOutputHelper) : base
private protected static IReadOnlyDictionary GetTokenTypeToIndex(TestLspServer server)
=> SemanticTokensSchema.GetSchema(server.ClientCapabilities.HasVisualStudioLspCapability()).TokenTypeToIndex;
+ private protected static async Task RunGetSemanticTokensFullAsync(TestLspServer testLspServer, LSP.Location caret)
+ {
+ var result = await testLspServer.ExecuteRequestAsync(LSP.Methods.TextDocumentSemanticTokensFullName,
+ CreateSemanticTokensFullParams(caret), CancellationToken.None);
+ Contract.ThrowIfNull(result);
+ return result;
+ }
+
private protected static async Task RunGetSemanticTokensRangeAsync(TestLspServer testLspServer, LSP.Location caret, LSP.Range range)
{
var result = await testLspServer.ExecuteRequestAsync(LSP.Methods.TextDocumentSemanticTokensRangeName,
@@ -43,6 +51,12 @@ private protected static IReadOnlyDictionary GetTokenTypeToIndex(Te
return result;
}
+ private static LSP.SemanticTokensFullParams CreateSemanticTokensFullParams(LSP.Location caret)
+ => new LSP.SemanticTokensFullParams
+ {
+ TextDocument = new LSP.TextDocumentIdentifier { Uri = caret.Uri }
+ };
+
private static LSP.SemanticTokensRangeParams CreateSemanticTokensRangeParams(LSP.Location caret, LSP.Range range)
=> new LSP.SemanticTokensRangeParams
{
diff --git a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensFullTests.cs b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensFullTests.cs
new file mode 100644
index 0000000000000..8d9496ebf7700
--- /dev/null
+++ b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensFullTests.cs
@@ -0,0 +1,67 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System.Linq;
+using System.Threading.Tasks;
+using Microsoft.CodeAnalysis.Classification;
+using Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens;
+using Roslyn.LanguageServer.Protocol;
+using Roslyn.Test.Utilities;
+using Xunit;
+using Xunit.Abstractions;
+using LSP = Roslyn.LanguageServer.Protocol;
+
+#pragma warning disable format // We want to force explicit column spacing within the collection literals in this file, so we disable formatting.
+
+namespace Microsoft.CodeAnalysis.LanguageServer.UnitTests.SemanticTokens;
+
+public sealed class SemanticTokensFullTests(ITestOutputHelper testOutputHelper) : AbstractSemanticTokensTests(testOutputHelper)
+{
+ [Theory, CombinatorialData]
+ public async Task TestGetSemanticTokensFull_FullDocAsync(bool mutatingLspWorkspace, bool isVS)
+ {
+ var markup =
+ """
+ {|caret:|}// Comment
+ static class C { }
+
+ """;
+ await using var testLspServer = await CreateTestLspServerAsync(
+ markup, mutatingLspWorkspace, GetCapabilities(isVS));
+
+ var results = await RunGetSemanticTokensFullAsync(testLspServer, testLspServer.GetLocations("caret").First());
+
+ var expectedResults = new LSP.SemanticTokens();
+ var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
+ if (isVS)
+ {
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment'
+ 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
+ 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
+ }
+ else
+ {
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment'
+ 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
+ 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
+ }
+
+ await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results.Data).ConfigureAwait(false);
+ AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results.Data));
+ }
+}
diff --git a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangeTests.cs b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangeTests.cs
index 34ff01d65aa4e..029e55d893df5 100644
--- a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangeTests.cs
+++ b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangeTests.cs
@@ -17,581 +17,592 @@
#pragma warning disable format // We want to force explicit column spacing within the collection literals in this file, so we disable formatting.
-namespace Microsoft.CodeAnalysis.LanguageServer.UnitTests.SemanticTokens
+namespace Microsoft.CodeAnalysis.LanguageServer.UnitTests.SemanticTokens;
+
+public sealed class SemanticTokensRangeTests(ITestOutputHelper testOutputHelper)
+ : AbstractSemanticTokensTests(testOutputHelper)
{
- public class SemanticTokensRangeTests : AbstractSemanticTokensTests
+ [Theory, CombinatorialData]
+ public async Task TestGetSemanticTokensRange_FullDocAsync(bool mutatingLspWorkspace, bool isVS)
{
- public SemanticTokensRangeTests(ITestOutputHelper testOutputHelper) : base(testOutputHelper)
+ var markup =
+ """
+ {|caret:|}// Comment
+ static class C { }
+
+ """;
+ await using var testLspServer = await CreateTestLspServerAsync(
+ markup, mutatingLspWorkspace, GetCapabilities(isVS));
+
+ var range = new LSP.Range { Start = new Position(0, 0), End = new Position(2, 0) };
+ var results = await RunGetSemanticTokensRangeAsync(testLspServer, testLspServer.GetLocations("caret").First(), range);
+
+ var expectedResults = new LSP.SemanticTokens();
+ var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
+ if (isVS)
{
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment'
+ 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
+ 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
}
-
- [Theory, CombinatorialData]
- public async Task TestGetSemanticTokensRange_FullDocAsync(bool mutatingLspWorkspace, bool isVS)
+ else
{
- var markup =
-@"{|caret:|}// Comment
-static class C { }
-";
- await using var testLspServer = await CreateTestLspServerAsync(
- markup, mutatingLspWorkspace, GetCapabilities(isVS));
-
- var range = new LSP.Range { Start = new Position(0, 0), End = new Position(2, 0) };
- var results = await RunGetSemanticTokensRangeAsync(testLspServer, testLspServer.GetLocations("caret").First(), range);
-
- var expectedResults = new LSP.SemanticTokens();
- var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
- if (isVS)
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment'
- 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
- 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- ];
- }
- else
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment'
+ 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
+ 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
+ }
+
+ await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results.Data).ConfigureAwait(false);
+ AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results.Data));
+ }
+
+ [Theory, CombinatorialData]
+ public async Task TestGetSemanticTokensRanges_ComputesTokensWithMultipleRanges(bool mutatingLspWorkspace, bool isVS)
+ {
+ // Razor docs should be returning semantic + syntactic results.
+ var markup =
+ """
+ {|caret:|}//
+ #pragma warning disable 1591
+ namespace Razor
{
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment'
- 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
- 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- ];
+ #line hidden
+ public class Template
+ {
+ #pragma warning disable 219
+ private void __RazorDirectiveTokenHelpers__() {
+ ((global::System.Action)(() => {
+ #nullable restore
+ #line 1 "test.cshtml"
+ var z = 1;
+
+ #line default
+ #line hidden
+ #nullable disable
+ }
+ ))();
+ }
+ #pragma warning restore 219
+ #pragma warning disable 0414
+ private static object __o = null;
+ #pragma warning restore 0414
+ #pragma warning disable 1998
+ public async override global::System.Threading.Tasks.Task ExecuteAsync()
+ {
+ #nullable restore
+ #line 2 "test.cshtml"
+ var x =
+
+ #line default
+ #line hidden
+ #nullable disable
+ }
+ #pragma warning restore 1998
+ }
}
+ #pragma warning restore 1591
- await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results.Data).ConfigureAwait(false);
- AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results.Data));
- }
+ """;
+ await using var testLspServer = await CreateTestLspServerAsync(
+ markup, mutatingLspWorkspace, GetCapabilities(isVS));
+
+ var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
+ ImmutableArray spans = [
+ new LinePositionSpan(new LinePosition(12, 0), new LinePosition(13, 0)),
+ new LinePositionSpan(new LinePosition(29, 0), new LinePosition(30, 0)),
+ ];
+
+ var options = ClassificationOptions.Default;
+ var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
+ document, spans, isVS, options, CancellationToken.None);
- [Theory, CombinatorialData]
- public async Task TestGetSemanticTokensRanges_ComputesTokensWithMultipleRanges(bool mutatingLspWorkspace, bool isVS)
+ var expectedResults = new LSP.SemanticTokens();
+ var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
+ if (isVS)
{
- // Razor docs should be returning semantic + syntactic results.
- var markup =
-@"{|caret:|}//
-#pragma warning disable 1591
-namespace Razor
-{
- #line hidden
- public class Template
- {
- #pragma warning disable 219
- private void __RazorDirectiveTokenHelpers__() {
- ((global::System.Action)(() => {
-#nullable restore
-#line 1 ""test.cshtml""
-var z = 1;
-
-#line default
-#line hidden
-#nullable disable
- }
- ))();
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 12, 0, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var'
+ 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'z'
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Number], 0, // '1'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
+ 17, 3, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var'
+ 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x'
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
+ ];
}
- #pragma warning restore 219
- #pragma warning disable 0414
- private static object __o = null;
- #pragma warning restore 0414
- #pragma warning disable 1998
- public async override global::System.Threading.Tasks.Task ExecuteAsync()
+ else
{
-#nullable restore
-#line 2 ""test.cshtml""
- var x =
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 12, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var'
+ 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'z'
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Number], 0, // '1'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
+ 17, 3, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var'
+ 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x'
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
+ ];
+ }
-#line default
-#line hidden
-#nullable disable
+ await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
+ AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
+ }
+
+ [Theory, CombinatorialData]
+ public async Task TestGetSemanticTokensRange_PartialDocAsync(bool mutatingLspWorkspace, bool isVS)
+ {
+ // Razor docs should be returning semantic + syntactic results.
+ var markup =
+ """
+ {|caret:|}// Comment
+ static class C { }
+
+ """;
+ await using var testLspServer = await CreateTestLspServerAsync(
+ markup, mutatingLspWorkspace, GetCapabilities(isVS));
+
+ var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
+ ImmutableArray spans = [new LinePositionSpan(new LinePosition(1, 0), new LinePosition(2, 0))];
+ var options = ClassificationOptions.Default;
+ var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
+ document, spans, isVS, options, CancellationToken.None);
+
+ var expectedResults = new LSP.SemanticTokens();
+ var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
+ if (isVS)
+ {
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
+ 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
}
- #pragma warning restore 1998
+ else
+ {
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
+ 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
+ }
+
+ await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
+ AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
}
-}
-#pragma warning restore 1591
-";
- await using var testLspServer = await CreateTestLspServerAsync(
- markup, mutatingLspWorkspace, GetCapabilities(isVS));
-
- var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
- ImmutableArray spans = [
- new LinePositionSpan(new LinePosition(12, 0), new LinePosition(13, 0)),
- new LinePositionSpan(new LinePosition(29, 0), new LinePosition(30, 0)),
+
+ [Theory, CombinatorialData]
+ public async Task TestGetSemanticTokensRange_MultiLineComment_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS)
+ {
+ // Testing as a Razor doc so we get both syntactic + semantic results; otherwise the results would be empty.
+ var markup =
+ """
+ {|caret:|}class C { /* one
+
+ two
+ three */ }
+
+ """;
+ await using var testLspServer = await CreateTestLspServerAsync(
+ markup, mutatingLspWorkspace, GetCapabilities(isVS));
+
+ var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
+ ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(4, 0))];
+ var options = ClassificationOptions.Default;
+ var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
+ document, spans, isVS, options, CancellationToken.None);
+
+ var expectedResults = new LSP.SemanticTokens();
+ var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
+ if (isVS)
+ {
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 0, 2, 6, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '/* one'
+ 2, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'two'
+ 1, 0, 8, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'three */'
+ 0, 9, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
+ }
+ else
+ {
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 0, 2, 6, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '/* one'
+ 2, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'two'
+ 1, 0, 8, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'three */'
+ 0, 9, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
];
+ }
- var options = ClassificationOptions.Default;
- var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
- document, spans, isVS, options, CancellationToken.None);
+ await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
+ AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
+ }
- var expectedResults = new LSP.SemanticTokens();
- var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
- if (isVS)
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 12, 0, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var'
- 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'z'
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Number], 0, // '1'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
- 17, 3, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var'
- 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x'
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
- ];
- }
- else
+ [Theory, CombinatorialData]
+ public async Task TestGetSemanticTokensRange_StringLiteral_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS)
+ {
+ var markup =
+ """
+ {|caret:|}class C
{
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 12, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var'
- 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'z'
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Number], 0, // '1'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
- 17, 3, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var'
- 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x'
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
- ];
+ void M()
+ {
+ var x = @"one
+ two ""
+ three";
+ }
}
- await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
- AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
- }
+ """;
- [Theory, CombinatorialData]
- public async Task TestGetSemanticTokensRange_PartialDocAsync(bool mutatingLspWorkspace, bool isVS)
- {
- // Razor docs should be returning semantic + syntactic results.
- var markup =
-@"{|caret:|}// Comment
-static class C { }
-";
- await using var testLspServer = await CreateTestLspServerAsync(
- markup, mutatingLspWorkspace, GetCapabilities(isVS));
-
- var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
- ImmutableArray spans = [new LinePositionSpan(new LinePosition(1, 0), new LinePosition(2, 0))];
- var options = ClassificationOptions.Default;
- var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
- document, spans, isVS, options, CancellationToken.None);
-
- var expectedResults = new LSP.SemanticTokens();
- var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
- if (isVS)
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
- 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- ];
- }
- else
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
- 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- ];
- }
+ await using var testLspServer = await CreateTestLspServerAsync(
+ markup, mutatingLspWorkspace, GetCapabilities(isVS));
- await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
- AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
- }
+ var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
+ ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(9, 0))];
+ var options = ClassificationOptions.Default;
+ var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
+ document, spans, isVS, options, CancellationToken.None);
- [Theory, CombinatorialData]
- public async Task TestGetSemanticTokensRange_MultiLineComment_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS)
+ var expectedResults = new LSP.SemanticTokens();
+ var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
+ if (isVS)
{
- // Testing as a Razor doc so we get both syntactic + semantic results; otherwise the results would be empty.
- var markup =
-@"{|caret:|}class C { /* one
-
-two
-three */ }
-";
- await using var testLspServer = await CreateTestLspServerAsync(
- markup, mutatingLspWorkspace, GetCapabilities(isVS));
-
- var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
- ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(4, 0))];
- var options = ClassificationOptions.Default;
- var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
- document, spans, isVS, options, CancellationToken.None);
-
- var expectedResults = new LSP.SemanticTokens();
- var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
- if (isVS)
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 0, 2, 6, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '/* one'
- 2, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'two'
- 1, 0, 8, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'three */'
- 0, 9, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- ];
- }
- else
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 0, 2, 6, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '/* one'
- 2, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'two'
- 1, 0, 8, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'three */'
- 0, 9, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- ];
- }
-
- await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
- AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C'
+ 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 1, 4, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
+ 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '('
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')'
+ 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 1, 8, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var'
+ 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x'
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
+ 0, 2, 5, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"one'
+ 1, 0, 4, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'two '
+ 0, 4, 2, tokenTypeToIndex[ClassificationTypeNames.StringEscapeCharacter], 0, // '""'
+ 1, 0, 6, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'three"'
+ 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
+ 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
}
-
- [Theory, CombinatorialData]
- public async Task TestGetSemanticTokensRange_StringLiteral_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS)
+ else
{
- var markup =
-@"{|caret:|}class C
-{
- void M()
- {
- var x = @""one
-two """"
-three"";
- }
-}
-";
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C'
+ 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
+ 1, 4, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
+ 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M'
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '('
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')'
+ 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
+ 1, 8, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var'
+ 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x'
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
+ 0, 2, 5, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '@"one'
+ 1, 0, 4, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // 'two '
+ 0, 4, 2, tokenTypeToIndex[CustomLspSemanticTokenNames.StringEscapeCharacter], 0, // '""'
+ 1, 0, 6, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // 'three"'
+ 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
+ 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
+ }
- await using var testLspServer = await CreateTestLspServerAsync(
- markup, mutatingLspWorkspace, GetCapabilities(isVS));
+ await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
+ AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
+ }
- var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
- ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(9, 0))];
- var options = ClassificationOptions.Default;
- var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
- document, spans, isVS, options, CancellationToken.None);
+ [Theory, CombinatorialData]
+ public async Task TestGetSemanticTokensRange_Regex_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS)
+ {
+ var markup =
+ """
+ {|caret:|}using System.Text.RegularExpressions;
- var expectedResults = new LSP.SemanticTokens();
- var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
- if (isVS)
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C'
- 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 1, 4, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
- 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '('
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')'
- 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 1, 8, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var'
- 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x'
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
- 0, 2, 5, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"one'
- 1, 0, 4, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'two '
- 0, 4, 2, tokenTypeToIndex[ClassificationTypeNames.StringEscapeCharacter], 0, // '""'
- 1, 0, 6, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'three"'
- 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
- 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- ];
- }
- else
+ class C
{
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C'
- 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
- 1, 4, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
- 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M'
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '('
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')'
- 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
- 1, 8, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var'
- 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x'
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
- 0, 2, 5, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '@"one'
- 1, 0, 4, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // 'two '
- 0, 4, 2, tokenTypeToIndex[CustomLspSemanticTokenNames.StringEscapeCharacter], 0, // '""'
- 1, 0, 6, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // 'three"'
- 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
- 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- ];
+ void M()
+ {
+ var x = new Regex("(abc)*");
+ }
}
- await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
- AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
- }
+ """;
- [Theory, CombinatorialData]
- public async Task TestGetSemanticTokensRange_Regex_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS)
+ await using var testLspServer = await CreateTestLspServerAsync(
+ markup, mutatingLspWorkspace, GetCapabilities(isVS));
+
+ var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
+ ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(9, 0))];
+ var options = ClassificationOptions.Default;
+ var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
+ document, spans, isVS, options, CancellationToken.None);
+
+ var expectedResults = new LSP.SemanticTokens();
+ var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
+ if (isVS)
{
- var markup =
-@"{|caret:|}using System.Text.RegularExpressions;
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using'
+ 0, 6, 6, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'System'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
+ 0, 1, 4, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'Text'
+ 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
+ 0, 1, 18, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'RegularExpressions'
+ 0, 18, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
+ 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C'
+ 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
+ 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '('
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')'
+ 1, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 1, 2, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var'
+ 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x'
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
+ 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new'
+ 0, 4, 5, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'Regex'
+ 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '('
+ 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // '('
+ 0, 1, 3, tokenTypeToIndex[ClassificationTypeNames.RegexText], 0, // 'abc'
+ 0, 3, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // ')'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexQuantifier], 0, // '*'
+ 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
+ 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // }
+ 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // }
+ ];
+ }
+ else
+ {
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using'
+ 0, 6, 6, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'System'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
+ 0, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'Text'
+ 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
+ 0, 1, 18, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'RegularExpressions'
+ 0, 18, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';'
+ 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C'
+ 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
+ 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
+ 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M'
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '('
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')'
+ 1, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
+ 1, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var'
+ 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x'
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
+ 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new'
+ 0, 4, 5, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'Regex'
+ 0, 5, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '('
+ 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"'
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // '('
+ 0, 1, 3, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexText], 0, // 'abc'
+ 0, 3, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // ')'
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexQuantifier], 0, // '*'
+ 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"'
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')'
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';'
+ 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // }
+ 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // }
+ ];
+ }
-class C
-{
- void M()
- {
- var x = new Regex(""(abc)*"");
+ await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
+ AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
}
-}
-";
- await using var testLspServer = await CreateTestLspServerAsync(
- markup, mutatingLspWorkspace, GetCapabilities(isVS));
-
- var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
- ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(9, 0))];
- var options = ClassificationOptions.Default;
- var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
- document, spans, isVS, options, CancellationToken.None);
+ [Theory, CombinatorialData]
+ [WorkItem("https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1710519")]
+ public async Task TestGetSemanticTokensRange_RegexWithComment_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS)
+ {
+ var markup =
+ """
+ {|caret:|}using System.Text.RegularExpressions;
- var expectedResults = new LSP.SemanticTokens();
- var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
- if (isVS)
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using'
- 0, 6, 6, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'System'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
- 0, 1, 4, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'Text'
- 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
- 0, 1, 18, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'RegularExpressions'
- 0, 18, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
- 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C'
- 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
- 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '('
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')'
- 1, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 1, 2, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var'
- 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x'
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
- 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new'
- 0, 4, 5, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'Regex'
- 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '('
- 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // '('
- 0, 1, 3, tokenTypeToIndex[ClassificationTypeNames.RegexText], 0, // 'abc'
- 0, 3, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // ')'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexQuantifier], 0, // '*'
- 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
- 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // }
- 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // }
- ];
- }
- else
+ class C
{
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using'
- 0, 6, 6, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'System'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
- 0, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'Text'
- 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
- 0, 1, 18, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'RegularExpressions'
- 0, 18, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';'
- 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C'
- 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
- 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
- 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M'
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '('
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')'
- 1, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
- 1, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var'
- 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x'
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
- 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new'
- 0, 4, 5, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'Regex'
- 0, 5, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '('
- 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"'
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // '('
- 0, 1, 3, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexText], 0, // 'abc'
- 0, 3, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // ')'
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexQuantifier], 0, // '*'
- 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"'
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')'
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';'
- 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // }
- 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // }
- ];
+ void M()
+ {
+ var x = new Regex(@"(abc)* #comment
+ ", RegexOptions.IgnorePatternWhitespace);
+ }
}
- await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
- AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
- }
+ """;
- [Theory, CombinatorialData]
- [WorkItem("https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1710519")]
- public async Task TestGetSemanticTokensRange_RegexWithComment_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS)
- {
- var markup =
-@"{|caret:|}using System.Text.RegularExpressions;
+ await using var testLspServer = await CreateTestLspServerAsync(
+ markup, mutatingLspWorkspace, GetCapabilities(isVS));
-class C
-{
- void M()
- {
- var x = new Regex(@""(abc)* #comment
- "", RegexOptions.IgnorePatternWhitespace);
- }
-}
-";
-
- await using var testLspServer = await CreateTestLspServerAsync(
- markup, mutatingLspWorkspace, GetCapabilities(isVS));
+ var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
+ var text = await document.GetTextAsync();
+ var options = ClassificationOptions.Default;
+ var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
+ document, spans: [text.Lines.GetLinePositionSpan(new(0, text.Length))], isVS, options: options, cancellationToken: CancellationToken.None);
- var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
- var options = ClassificationOptions.Default;
- var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
- document, spans: [], isVS, options: options, cancellationToken: CancellationToken.None);
+ var expectedResults = new LSP.SemanticTokens();
- var expectedResults = new LSP.SemanticTokens();
+ var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
+ if (isVS)
+ {
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using'
+ 0, 6, 6, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'System'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
+ 0, 1, 4, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'Text'
+ 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
+ 0, 1, 18, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'RegularExpressions'
+ 0, 18, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
+ 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C'
+ 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
+ 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '('
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')'
+ 1, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 1, 2, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var'
+ 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x'
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
+ 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new'
+ 0, 4, 5, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'Regex'
+ 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '('
+ 0, 1, 2, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // '('
+ 0, 1, 3, tokenTypeToIndex[ClassificationTypeNames.RegexText], 0, // 'abc'
+ 0, 3, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // ')'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexQuantifier], 0, // '*'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // ' '
+ 0, 1, 9, tokenTypeToIndex[ClassificationTypeNames.RegexComment], 0, // '#comment'
+ 1, 0, 27, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '"'
+ 0, 27, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ','
+ 0, 2, 12, tokenTypeToIndex[ClassificationTypeNames.EnumName], 0, // 'RegexOptions'
+ 0, 12, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
+ 0, 1, 23, tokenTypeToIndex[ClassificationTypeNames.EnumMemberName], 0, // 'IgnorePatternWhitespace'
+ 0, 23, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')'
+ 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
+ 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
+ }
+ else
+ {
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using'
+ 0, 6, 6, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'System'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
+ 0, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'Text'
+ 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
+ 0, 1, 18, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'RegularExpressions'
+ 0, 18, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';'
+ 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C'
+ 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
+ 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
+ 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M'
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '('
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')'
+ 1, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
+ 1, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var'
+ 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x'
+ 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
+ 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new'
+ 0, 4, 5, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'Regex'
+ 0, 5, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '('
+ 0, 1, 2, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '@"'
+ 0, 2, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // '('
+ 0, 1, 3, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexText], 0, // 'abc'
+ 0, 3, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // ')'
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexQuantifier], 0, // '*'
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // ' '
+ 0, 1, 9, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexComment], 0, // '#comment'
+ 1, 0, 27, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '"'
+ 0, 27, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ','
+ 0, 2, 12, tokenTypeToIndex[SemanticTokenTypes.Enum], 0, // 'RegexOptions'
+ 0, 12, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
+ 0, 1, 23, tokenTypeToIndex[SemanticTokenTypes.EnumMember], 0, // 'IgnorePatternWhitespace'
+ 0, 23, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')'
+ 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';'
+ 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '}'
+ 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '}'
+ ];
+ }
- var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
- if (isVS)
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using'
- 0, 6, 6, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'System'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
- 0, 1, 4, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'Text'
- 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
- 0, 1, 18, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'RegularExpressions'
- 0, 18, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
- 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C'
- 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
- 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '('
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')'
- 1, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 1, 2, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var'
- 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x'
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
- 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new'
- 0, 4, 5, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'Regex'
- 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '('
- 0, 1, 2, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // '('
- 0, 1, 3, tokenTypeToIndex[ClassificationTypeNames.RegexText], 0, // 'abc'
- 0, 3, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // ')'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexQuantifier], 0, // '*'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // ' '
- 0, 1, 9, tokenTypeToIndex[ClassificationTypeNames.RegexComment], 0, // '#comment'
- 1, 0, 27, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '"'
- 0, 27, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ','
- 0, 2, 12, tokenTypeToIndex[ClassificationTypeNames.EnumName], 0, // 'RegexOptions'
- 0, 12, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
- 0, 1, 23, tokenTypeToIndex[ClassificationTypeNames.EnumMemberName], 0, // 'IgnorePatternWhitespace'
- 0, 23, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')'
- 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';'
- 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // }
- 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // }
- ];
- }
- else
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using'
- 0, 6, 6, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'System'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
- 0, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'Text'
- 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
- 0, 1, 18, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'RegularExpressions'
- 0, 18, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';'
- 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C'
- 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
- 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void'
- 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M'
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '('
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')'
- 1, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{'
- 1, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var'
- 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x'
- 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '='
- 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new'
- 0, 4, 5, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'Regex'
- 0, 5, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '('
- 0, 1, 2, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '@"'
- 0, 2, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // '('
- 0, 1, 3, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexText], 0, // 'abc'
- 0, 3, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // ')'
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexQuantifier], 0, // '*'
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // ' '
- 0, 1, 9, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexComment], 0, // '#comment'
- 1, 0, 27, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '"'
- 0, 27, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ','
- 0, 2, 12, tokenTypeToIndex[SemanticTokenTypes.Enum], 0, // 'RegexOptions'
- 0, 12, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.'
- 0, 1, 23, tokenTypeToIndex[SemanticTokenTypes.EnumMember], 0, // 'IgnorePatternWhitespace'
- 0, 23, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')'
- 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';'
- 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // }
- 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // }
- ];
- }
+ await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
+ AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
+ }
- await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false);
- AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results));
- }
+ [Theory, CombinatorialData]
+ public void TestGetSemanticTokensRange_AssertCustomTokenTypes(bool isVS)
+ {
+ var capabilities = GetCapabilities(isVS);
+ var schema = SemanticTokensSchema.GetSchema(capabilities.HasVisualStudioLspCapability());
- [Theory, CombinatorialData]
- public void TestGetSemanticTokensRange_AssertCustomTokenTypes(bool isVS)
+ var expectedNames = ClassificationTypeNames.AllTypeNames.Where(s => !ClassificationTypeNames.AdditiveTypeNames.Contains(s));
+ foreach (var expectedClassificationName in expectedNames)
{
- var capabilities = GetCapabilities(isVS);
- var schema = SemanticTokensSchema.GetSchema(capabilities.HasVisualStudioLspCapability());
+ // Assert that the classification type name exists and is mapped to a semantic token name.
+ Assert.True(schema.TokenTypeMap.ContainsKey(expectedClassificationName), $"Missing token type for {expectedClassificationName}.");
- var expectedNames = ClassificationTypeNames.AllTypeNames.Where(s => !ClassificationTypeNames.AdditiveTypeNames.Contains(s));
- foreach (var expectedClassificationName in expectedNames)
- {
- // Assert that the classification type name exists and is mapped to a semantic token name.
- Assert.True(schema.TokenTypeMap.ContainsKey(expectedClassificationName), $"Missing token type for {expectedClassificationName}.");
-
- var tokenName = schema.TokenTypeMap[expectedClassificationName];
- Assert.True(schema.AllTokenTypes.Contains(tokenName));
- }
+ var tokenName = schema.TokenTypeMap[expectedClassificationName];
+ Assert.True(schema.AllTokenTypes.Contains(tokenName));
}
}
}
diff --git a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangesTests.cs b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangesTests.cs
index 3b7b832625688..a2cea4ca9be85 100644
--- a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangesTests.cs
+++ b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangesTests.cs
@@ -12,60 +12,57 @@
using Xunit.Abstractions;
using LSP = Roslyn.LanguageServer.Protocol;
-namespace Microsoft.CodeAnalysis.LanguageServer.UnitTests.SemanticTokens
+namespace Microsoft.CodeAnalysis.LanguageServer.UnitTests.SemanticTokens;
+
+public sealed class SemanticTokensRangesTests(ITestOutputHelper testOutputHelper) : AbstractSemanticTokensTests(testOutputHelper)
{
- public class SemanticTokensRangesTests : AbstractSemanticTokensTests
+ [Theory, CombinatorialData]
+ public async Task TestGetSemanticTokensRanges_FullDocAsync(bool mutatingLspWorkspace, bool isVS)
{
- public SemanticTokensRangesTests(ITestOutputHelper testOutputHelper) : base(testOutputHelper)
- {
- }
+ var markup =
+ """
+ {|caret:|}// Comment
+ static class C { }
- [Theory, CombinatorialData]
- public async Task TestGetSemanticTokensRanges_FullDocAsync(bool mutatingLspWorkspace, bool isVS)
- {
- var markup =
-@"{|caret:|}// Comment
-static class C { }
-";
- await using var testLspServer = await CreateTestLspServerAsync(
- markup, mutatingLspWorkspace, GetCapabilities(isVS));
+ """;
+ await using var testLspServer = await CreateTestLspServerAsync(
+ markup, mutatingLspWorkspace, GetCapabilities(isVS));
- var ranges = new[] { new LSP.Range { Start = new Position(0, 0), End = new Position(2, 0) } };
- var results = await RunGetSemanticTokensRangesAsync(testLspServer, testLspServer.GetLocations("caret").First(), ranges);
+ var ranges = new[] { new LSP.Range { Start = new Position(0, 0), End = new Position(2, 0) } };
+ var results = await RunGetSemanticTokensRangesAsync(testLspServer, testLspServer.GetLocations("caret").First(), ranges);
- var expectedResults = new LSP.SemanticTokens();
- var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
- if (isVS)
- {
- expectedResults.Data =
+ var expectedResults = new LSP.SemanticTokens();
+ var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
+ if (isVS)
+ {
+ expectedResults.Data =
#pragma warning disable format // Force explicit column spacing.
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment'
- 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
- 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- ];
- }
- else
- {
- expectedResults.Data =
- [
- // Line | Char | Len | Token type | Modifier
- 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment'
- 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
- 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
- 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
- 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
- ];
- }
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment'
+ 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
+ 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
+ }
+ else
+ {
+ expectedResults.Data =
+ [
+ // Line | Char | Len | Token type | Modifier
+ 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment'
+ 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static'
+ 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class'
+ 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{'
+ 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}'
+ ];
+ }
#pragma warning restore format
- await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results.Data).ConfigureAwait(false);
- AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results.Data));
- }
+ await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results.Data).ConfigureAwait(false);
+ AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results.Data));
}
}
diff --git a/src/Tools/SemanticSearch/BuildTask/GenerateFilteredReferenceAssembliesTask.cs b/src/Tools/SemanticSearch/BuildTask/GenerateFilteredReferenceAssembliesTask.cs
index f92be172c469a..5a1203605807c 100644
--- a/src/Tools/SemanticSearch/BuildTask/GenerateFilteredReferenceAssembliesTask.cs
+++ b/src/Tools/SemanticSearch/BuildTask/GenerateFilteredReferenceAssembliesTask.cs
@@ -66,6 +66,11 @@ public sealed class GenerateFilteredReferenceAssembliesTask : Task
[Required]
public string ApisDir { get; private set; } = null!;
+ ///
+ /// True to report an error if any changes in Semantic Search APIs are detected.
+ ///
+ public bool RequireNoApiChanges { get; private set; } = false;
+
public override bool Execute()
{
try
@@ -123,12 +128,14 @@ internal void ExecuteImpl(IEnumerable<(string apiSpecPath, IReadOnlyList
return;
}
- WriteApis(Path.Combine(ApisDir, assemblyName + ".txt"), peImageBuffer);
+ WriteApis(assemblyName, peImageBuffer);
}
}
- internal void WriteApis(string outputFilePath, byte[] peImage)
+ internal void WriteApis(string assemblyName, byte[] peImage)
{
+ string outputFilePath = Path.Combine(ApisDir, assemblyName + ".txt");
+
using var readableStream = new MemoryStream(peImage, writable: false);
var metadataRef = MetadataReference.CreateFromStream(readableStream);
var compilation = CSharpCompilation.Create("Metadata", references: [metadataRef]);
@@ -149,9 +156,39 @@ internal void WriteApis(string outputFilePath, byte[] peImage)
var newContent = $"# Generated, do not update manually{Environment.NewLine}" +
string.Join(Environment.NewLine, apis);
+ if (RequireNoApiChanges)
+ {
+ var oldContent = "";
+
+ if (File.Exists(outputFilePath))
+ {
+ try
+ {
+ oldContent = File.ReadAllText(outputFilePath, Encoding.UTF8);
+ }
+ catch (FileNotFoundException)
+ {
+ }
+ catch (Exception e)
+ {
+ Log.LogError($"Unable to read '{outputFilePath}': {e.Message}");
+ return;
+ }
+ }
+
+ if (oldContent != newContent)
+ {
+ Log.LogError(
+ $"APIs listed in file '{outputFilePath}' do not match the public APIs exposed by '{assemblyName}'. " +
+ $"Build SemanticSearch.ReferenceAssemblies project locally to update the file and review the changes.");
+
+ return;
+ }
+ }
+
try
{
- File.WriteAllText(outputFilePath, newContent);
+ File.WriteAllText(outputFilePath, newContent, Encoding.UTF8);
Log.LogMessage($"Baseline updated: '{outputFilePath}'");
}
catch (Exception e)
diff --git a/src/Tools/SemanticSearch/ReferenceAssemblies/Apis/System.Collections.Immutable.txt b/src/Tools/SemanticSearch/ReferenceAssemblies/Apis/System.Collections.Immutable.txt
index 1977066cdea78..27686a749ff3d 100644
--- a/src/Tools/SemanticSearch/ReferenceAssemblies/Apis/System.Collections.Immutable.txt
+++ b/src/Tools/SemanticSearch/ReferenceAssemblies/Apis/System.Collections.Immutable.txt
@@ -20,6 +20,8 @@ System.Collections.Frozen.FrozenDictionary`2.get_Item(`0)
System.Collections.Frozen.FrozenDictionary`2.get_Keys
System.Collections.Frozen.FrozenDictionary`2.get_Values
System.Collections.Frozen.FrozenSet
+System.Collections.Frozen.FrozenSet.Create``1(System.Collections.Generic.IEqualityComparer{``0},System.ReadOnlySpan{``0})
+System.Collections.Frozen.FrozenSet.Create``1(System.ReadOnlySpan{``0})
System.Collections.Frozen.FrozenSet.ToFrozenSet``1(System.Collections.Generic.IEnumerable{``0},System.Collections.Generic.IEqualityComparer{``0})
System.Collections.Frozen.FrozenSet`1
System.Collections.Frozen.FrozenSet`1.Contains(`0)
diff --git a/src/Tools/SemanticSearch/ReferenceAssemblies/SemanticSearch.ReferenceAssemblies.csproj b/src/Tools/SemanticSearch/ReferenceAssemblies/SemanticSearch.ReferenceAssemblies.csproj
index e241734e01deb..35388a82d149c 100644
--- a/src/Tools/SemanticSearch/ReferenceAssemblies/SemanticSearch.ReferenceAssemblies.csproj
+++ b/src/Tools/SemanticSearch/ReferenceAssemblies/SemanticSearch.ReferenceAssemblies.csproj
@@ -45,7 +45,7 @@
'%(ReferencePath.FileName)' == 'Microsoft.CodeAnalysis.CSharp'" />
-
+
<_InputFile Include="@(ApiSet)" />
<_InputFile Include="@(_InputReference)" />
@@ -58,7 +58,13 @@
-
+
+
+ <_RequireNoApiChanges>false
+ <_RequireNoApiChanges Condition="'$(ContinuousIntegrationBuildCorrectness)' == 'true'">true
+
+
+
@@ -66,7 +72,7 @@
-
+