
Commit c6dc4e5

debugging sync module

1 parent d95e9f2

6 files changed: +55 -7 lines changed


sway-lsp/src/capabilities/semantic_tokens.rs

Lines changed: 8 additions & 0 deletions
@@ -25,6 +25,13 @@ pub fn semantic_tokens_range(
    range: &Range,
) -> Option<SemanticTokensRangeResult> {
    let _p = tracing::trace_span!("semantic_tokens_range").entered();
+    eprintln!("semantic_tokens_range request for: url: {:?}", url.to_file_path().unwrap());
+    let mut k = std::collections::HashSet::new();
+    for i in token_map.iter() {
+        let key = i.key().path.clone();
+        k.insert(key.clone());
+    }
+    eprintln!("token map keys: {:?}", k);
    let tokens: Vec<_> = token_map
        .tokens_for_file(url)
        .filter(|item| {
@@ -33,6 +40,7 @@ pub fn semantic_tokens_range(
            token_range.start >= range.start && token_range.end <= range.end
        })
        .collect();
+    eprintln!("semantic_tokens_range: token length: {:?} | url: {:?}", tokens.len(), url.to_file_path().unwrap());
    let sorted_tokens_refs = sort_tokens(&tokens);
    Some(semantic_tokens(&sorted_tokens_refs[..]).into())
}
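The filter above keeps only the tokens whose span is fully contained in the requested range, then sorts them before encoding. A minimal, self-contained sketch of that containment check, using hypothetical Position, Range, and Token types rather than the sway-lsp or lsp-types definitions:

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct Position { line: u32, character: u32 }

#[derive(Clone, Copy, Debug)]
struct Range { start: Position, end: Position }

#[derive(Clone, Copy, Debug)]
struct Token { range: Range, kind: u32 }

fn tokens_in_range(tokens: &[Token], range: &Range) -> Vec<Token> {
    let mut hits: Vec<Token> = tokens
        .iter()
        .copied()
        // A token qualifies only if it both starts and ends inside the request.
        .filter(|t| t.range.start >= range.start && t.range.end <= range.end)
        .collect();
    // Sort by start position; the semantic-tokens payload is delta-encoded
    // from one token to the next, so order matters.
    hits.sort_by_key(|t| t.range.start);
    hits
}

fn main() {
    let inside = Token { range: Range { start: Position { line: 2, character: 0 }, end: Position { line: 2, character: 3 } }, kind: 0 };
    let outside = Token { range: Range { start: Position { line: 9, character: 0 }, end: Position { line: 9, character: 5 } }, kind: 1 };
    let request = Range { start: Position { line: 0, character: 0 }, end: Position { line: 5, character: 0 } };
    assert_eq!(tokens_in_range(&[inside, outside], &request).len(), 1);
}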

sway-lsp/src/core/session.rs

Lines changed: 20 additions & 4 deletions
@@ -336,6 +336,7 @@ pub fn traverse(
    session: Arc<Session>,
    token_map: &TokenMap,
    lsp_mode: Option<&LspConfig>,
+    program_id: ProgramId,
) -> Result<Option<CompileResults>, LanguageServerError> {
    let _p = tracing::trace_span!("traverse").entered();
    let modified_file = lsp_mode.and_then(|mode| {
@@ -344,9 +345,11 @@ pub fn traverse(
            .find_map(|(path, version)| version.map(|_| path.clone()))
    });
    if let Some(path) = &modified_file {
+        eprintln!("removing tokens for file: {:?}", path);
        token_map.remove_tokens_for_file(path);
    } else {
-        token_map.clear();
+        eprintln!("this is where we used to remove all tokens");
+        token_map.remove_tokens_for_program(program_id);
    }

    session.metrics.clear();
@@ -390,6 +393,7 @@ pub fn traverse(
            let modified_program_id = program_id_from_path(modified_file, engines)?;
            // We can skip traversing the programs for this iteration as they are unchanged.
            if program_id != modified_program_id {
+                eprintln!("skipping traversing program: {:?}", program_id);
                continue;
            }
        }
@@ -465,33 +469,42 @@ pub fn parse_project(
    session: Arc<Session>,
    token_map: Arc<TokenMap>,
) -> Result<(), LanguageServerError> {
+    eprintln!("parse_project");
    let _p = tracing::trace_span!("parse_project").entered();
+    let program_id = program_id_from_url(&uri, engines_clone)?;
+
    let build_plan = session
        .build_plan_cache
        .get_or_update(&session.sync.manifest_path(), || build_plan(uri))?;

+    eprintln!("compile");
    let results = compile(
        &build_plan,
        engines_clone,
        retrigger_compilation,
        lsp_mode.as_ref(),
    )?;
-
+    eprintln!("compile done");
    // Check if the last result is None or if results is empty, indicating an error occurred in the compiler.
    // If we don't return an error here, then we will likely crash when trying to access the Engines
    // during traversal or when creating runnables.
    if results.last().is_none_or(|(value, _)| value.is_none()) {
        return Err(LanguageServerError::ProgramsIsNone);
    }

+    eprintln!("traverse");
    let diagnostics = traverse(
        results,
        engines_original.clone(),
        engines_clone,
        session.clone(),
        &token_map,
        lsp_mode.as_ref(),
+        program_id,
    )?;
+    let tokens_for_program = token_map.tokens_for_file(&uri).collect::<Vec<_>>();
+    eprintln!("tokens_for_program: {:?}", tokens_for_program.len());
+
    if let Some(config) = &lsp_mode {
        // Only write the diagnostics results on didSave or didOpen.
        if !config.optimized_build {
@@ -503,8 +516,6 @@ pub fn parse_project(
    }

    session.runnables.clear();
-    let path = uri.to_file_path().unwrap();
-    let program_id = program_id_from_path(&path, engines_clone)?;
    if let Some(metrics) = session.metrics.get(&program_id) {
        // Check if the cached AST was returned by the compiler for the users workspace.
        // If it was, then we need to use the original engines.
@@ -690,6 +701,11 @@ pub fn program_id_from_path(
    Ok(program_id)
}

+pub fn program_id_from_url(url: &Url, engines: &Engines) -> Result<ProgramId, DirectoryError> {
+    let path = url.to_file_path().unwrap();
+    program_id_from_path(&path, engines)
+}
+
/// A cache for storing and retrieving BuildPlan objects.
#[derive(Debug, Clone)]
pub struct BuildPlanCache {
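The modified-file detection in traverse hinges on a find_map over the per-file versions supplied by the LSP mode: the first path that carries a concrete version is treated as the file that changed in this cycle. A minimal sketch of that lookup, with a plain slice of (PathBuf, Option<u64>) pairs standing in for the real file_versions structure (an assumption here):

use std::path::PathBuf;

// Only entries with a Some(version) were touched by the editor this cycle.
fn modified_file(file_versions: &[(PathBuf, Option<u64>)]) -> Option<PathBuf> {
    file_versions
        .iter()
        .find_map(|(path, version)| version.map(|_| path.clone()))
}

fn main() {
    let versions = vec![
        (PathBuf::from("src/main.sw"), None),
        (PathBuf::from("src/lib.sw"), Some(3)),
    ];
    assert_eq!(modified_file(&versions), Some(PathBuf::from("src/lib.sw")));
}

When no entry carries a version, the lookup returns None and traverse falls back to evicting every token for the program being rebuilt, as shown in the hunk above.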

sway-lsp/src/core/token_map.rs

Lines changed: 6 additions & 1 deletion
@@ -20,7 +20,7 @@ pub use crate::core::token_map_ext::TokenMapExt;
///
/// The TokenMap is a wrapper around a [DashMap], which is a concurrent HashMap.
#[derive(Debug, Default)]
-pub struct TokenMap(DashMap<TokenIdent, Token>);
+pub struct TokenMap(pub DashMap<TokenIdent, Token>);

impl<'a> TokenMap {
    /// Create a new token map.
@@ -256,6 +256,11 @@ impl<'a> TokenMap {
        self.0
            .retain(|key, _value| (key.path.as_ref() != Some(path_to_remove)));
    }
+
+    /// Remove all tokens for the given program from the token map.
+    pub fn remove_tokens_for_program(&self, program_id: ProgramId) {
+        self.0.retain(|key, _value| (key.program_id() != Some(program_id)));
+    }
}

impl std::ops::Deref for TokenMap {
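remove_tokens_for_program leans on DashMap::retain, which walks the concurrent map and keeps only the entries for which the closure returns true, so the whole map no longer has to be cleared. A minimal, runnable sketch of the same eviction pattern, assuming the dashmap crate and hypothetical ProgramId/TokenKey stand-ins rather than the real TokenIdent:

use dashmap::DashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ProgramId(u32); // hypothetical stand-in

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct TokenKey {
    name: String,
    program_id: Option<ProgramId>, // hypothetical stand-in for TokenIdent's program id
}

// Drop every entry that belongs to the given program, keep everything else.
fn remove_tokens_for_program(map: &DashMap<TokenKey, u32>, program_id: ProgramId) {
    map.retain(|key, _value| key.program_id != Some(program_id));
}

fn main() {
    let map: DashMap<TokenKey, u32> = DashMap::new();
    map.insert(TokenKey { name: "a".into(), program_id: Some(ProgramId(1)) }, 0);
    map.insert(TokenKey { name: "b".into(), program_id: Some(ProgramId(2)) }, 1);
    remove_tokens_for_program(&map, ProgramId(1));
    assert_eq!(map.len(), 1); // only the ProgramId(2) token survives
}

Because retain takes &self, the eviction runs without exclusive access to the map, which is what lets the token map stay shared across the server while a rebuild is in progress.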

sway-lsp/src/handlers/notification.rs

Lines changed: 7 additions & 0 deletions
@@ -23,11 +23,15 @@ pub async fn handle_did_open_text_document(
    let (uri, session) = state
        .uri_and_session_from_workspace(&params.text_document.uri)
        .await?;
+    eprintln!("did_open for {:?}", uri.to_file_path().unwrap());
+
    state.documents.handle_open_file(&uri).await;

    send_new_compilation_request(state, session.clone(), &uri, None, false);
    state.is_compiling.store(true, Ordering::SeqCst);
+    eprintln!("did_open: waiting for parsing to finish for {:?}", uri.to_file_path().unwrap());
    state.wait_for_parsing().await;
+    eprintln!("did_open: publishing diagnostics for {:?}", uri.to_file_path().unwrap());
    state
        .publish_diagnostics(uri, params.text_document.uri, session)
        .await;
@@ -42,9 +46,11 @@ fn send_new_compilation_request(
    version: Option<i32>,
    optimized_build: bool,
) {
+    eprintln!("new compilation request for {:?}", uri.to_file_path().unwrap());
    let file_versions = file_versions(&state.documents, uri, version.map(|v| v as u64));

    if state.is_compiling.load(Ordering::SeqCst) {
+        eprintln!("cancelling existing compilation and retriggering compilation for {:?}", uri.to_file_path().unwrap());
        // If we are already compiling, then we need to retrigger compilation
        state.retrigger_compilation.store(true, Ordering::SeqCst);
    }
@@ -58,6 +64,7 @@
        }
    }

+    eprintln!("sending compilation request for {:?}", uri.to_file_path().unwrap());
    let _ = state
        .cb_tx
        .send(TaskMessage::CompilationContext(CompilationContext {
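send_new_compilation_request combines an atomic retrigger flag with a channel send: if a build is already in flight, the flag asks it to cancel so the newer edit wins, and the new context is queued either way. A minimal sketch of that pattern, using std::sync::mpsc and a stripped-down request type (both assumptions; the server's own channel and CompilationContext carry more state):

use std::sync::{
    atomic::{AtomicBool, Ordering},
    mpsc::{channel, Sender},
    Arc,
};

struct CompilationRequest {
    uri: String, // hypothetical payload
}

fn send_new_compilation_request(
    is_compiling: &AtomicBool,
    retrigger_compilation: &Arc<AtomicBool>,
    tx: &Sender<CompilationRequest>,
    uri: &str,
) {
    if is_compiling.load(Ordering::SeqCst) {
        // Ask the in-flight compilation to stop early so the newer edit wins.
        retrigger_compilation.store(true, Ordering::SeqCst);
    }
    // Queue the new request; the compile thread picks it up when it is free.
    let _ = tx.send(CompilationRequest { uri: uri.to_string() });
}

fn main() {
    let (tx, rx) = channel();
    let is_compiling = AtomicBool::new(true);
    let retrigger = Arc::new(AtomicBool::new(false));
    send_new_compilation_request(&is_compiling, &retrigger, &tx, "src/main.sw");
    assert!(retrigger.load(Ordering::SeqCst));
    println!("queued compile for {}", rx.try_recv().unwrap().uri);
}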

sway-lsp/src/handlers/request.rs

Lines changed: 6 additions & 2 deletions
@@ -329,16 +329,20 @@ pub async fn handle_semantic_tokens_range(
    state: &ServerState,
    params: SemanticTokensRangeParams,
) -> Result<Option<SemanticTokensRangeResult>> {
+    eprintln!("semantic tokens req, waiting for parsing| params uri: {:?}", params.text_document.uri.to_file_path().unwrap());
    let _ = state.wait_for_parsing().await;
    match state
        .uri_and_session_from_workspace(&params.text_document.uri)
        .await
    {
-        Ok((uri, _session)) => Ok(capabilities::semantic_tokens::semantic_tokens_range(
+        Ok((uri, _session)) => {
+            eprintln!("semantic tokens req, got uri: {:?}", uri.to_file_path().unwrap());
+            Ok(capabilities::semantic_tokens::semantic_tokens_range(
            &state.token_map,
            &uri,
            &params.range,
-        )),
+            ))
+        },
        Err(err) => {
            tracing::error!("{}", err.to_string());
            Ok(None)

sway-lsp/src/server_state.rs

Lines changed: 8 additions & 0 deletions
@@ -137,6 +137,7 @@ impl ServerState {
        while let Ok(msg) = rx.recv() {
            match msg {
                TaskMessage::CompilationContext(ctx) => {
+                    eprintln!("compilation context received for {:?}", ctx.uri.as_ref().unwrap().to_file_path().unwrap());
                    let uri = ctx.uri.as_ref().unwrap().clone();
                    let session = ctx.session.as_ref().unwrap().clone();
                    let engines_original = ctx.engines.clone();
@@ -160,6 +161,7 @@ impl ServerState {
                        file_versions: ctx.file_versions,
                    });

+                    eprintln!("compiling for {:?}", uri.to_file_path().unwrap());
                    // Set the is_compiling flag to true so that the wait_for_parsing function knows that we are compiling
                    is_compiling.store(true, Ordering::SeqCst);
                    match session::parse_project(
@@ -176,6 +178,7 @@ impl ServerState {
                            // Find the program id from the path
                            match session::program_id_from_path(&path, &engines_clone) {
                                Ok(program_id) => {
+                                    eprintln!("compilation success for {:?}", uri.to_file_path().unwrap());
                                    // Use the program id to get the metrics for the program
                                    if let Some(metrics) = session.metrics.get(&program_id) {
                                        // It's very important to check if the workspace AST was reused to determine if we need to overwrite the engines.
@@ -198,23 +201,27 @@ impl ServerState {
                                        LastCompilationState::Success;
                                }
                                Err(err) => {
+                                    eprintln!("compilation failed for {:?}", uri.to_file_path().unwrap());
                                    tracing::error!("{}", err.to_string());
                                    *last_compilation_state.write() =
                                        LastCompilationState::Failed;
                                }
                            }
                        }
                        Err(_err) => {
+                            eprintln!("compilation failed for {:?}", uri.to_file_path().unwrap());
                            *last_compilation_state.write() = LastCompilationState::Failed;
                        }
                    }

                    // Reset the flags to false
                    is_compiling.store(false, Ordering::SeqCst);
                    retrigger_compilation.store(false, Ordering::SeqCst);
+                    eprintln!("resetting compilation flags for {:?}", uri.to_file_path().unwrap());

                    // Make sure there isn't any pending compilation work
                    if rx.is_empty() {
+                        eprintln!("finished compilation, notify waiters for {:?}", uri.to_file_path().unwrap());
                        // finished compilation, notify waiters
                        finished_compilation.notify_waiters();
                    }
@@ -303,6 +310,7 @@ impl ServerState {
        session: Arc<Session>,
    ) {
        let diagnostics = self.diagnostics(&uri, session.clone());
+        eprintln!("diagnostics len = {:?}", diagnostics.len());
        // Note: Even if the computed diagnostics vec is empty, we still have to push the empty Vec
        // in order to clear former diagnostics. Newly pushed diagnostics always replace previously pushed diagnostics.
        if let Some(client) = self.client.as_ref() {
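The compile loop above clears is_compiling and calls notify_waiters once the queue is drained, while handlers such as handle_semantic_tokens_range await that signal in wait_for_parsing before touching the token map. A minimal sketch of that handshake, assuming a tokio runtime and hypothetical field names rather than the real ServerState:

use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};
use tokio::sync::Notify;

struct ServerFlags {
    is_compiling: AtomicBool,
    finished_compilation: Notify,
}

impl ServerFlags {
    async fn wait_for_parsing(&self) {
        loop {
            // Register as a waiter before re-checking the flag so a
            // notify_waiters() racing with the check cannot be missed.
            let notified = self.finished_compilation.notified();
            tokio::pin!(notified);
            notified.as_mut().enable();
            if !self.is_compiling.load(Ordering::SeqCst) {
                return;
            }
            notified.await;
        }
    }

    // Called by the compile loop once no more work is queued.
    fn finish_compilation(&self) {
        self.is_compiling.store(false, Ordering::SeqCst);
        self.finished_compilation.notify_waiters();
    }
}

#[tokio::main]
async fn main() {
    let flags = Arc::new(ServerFlags {
        is_compiling: AtomicBool::new(true),
        finished_compilation: Notify::new(),
    });
    let worker = flags.clone();
    tokio::spawn(async move { worker.finish_compilation() });
    flags.wait_for_parsing().await;
    println!("compilation finished");
}

Registering the waiter with enable before re-checking the flag avoids losing a notify_waiters call that lands between the check and the await.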
