diff --git a/sway-lsp/benches/lsp_benchmarks/compile.rs b/sway-lsp/benches/lsp_benchmarks/compile.rs
index fdab8f01094..4a5f8f85c12 100644
--- a/sway-lsp/benches/lsp_benchmarks/compile.rs
+++ b/sway-lsp/benches/lsp_benchmarks/compile.rs
@@ -29,7 +29,7 @@ fn benchmarks(c: &mut Criterion) {
         b.iter(|| {
             let engines = Engines::default();
             let _ = black_box(
-                session::compile(&build_plan, &engines, None, lsp_mode.clone(), experimental)
+                session::compile(&build_plan, &engines, None, lsp_mode.as_ref(), experimental)
                     .unwrap(),
             );
         })
@@ -38,12 +38,19 @@
     c.bench_function("traverse", |b| {
         let engines = Engines::default();
         let results = black_box(
-            session::compile(&build_plan, &engines, None, lsp_mode.clone(), experimental).unwrap(),
+            session::compile(&build_plan, &engines, None, lsp_mode.as_ref(), experimental).unwrap(),
        );
         let session = Arc::new(session::Session::new());
         b.iter(|| {
-            let _ =
-                black_box(session::traverse(results.clone(), &engines, session.clone()).unwrap());
+            let _ = black_box(
+                session::traverse(
+                    results.clone(),
+                    &engines,
+                    session.clone(),
+                    lsp_mode.as_ref(),
+                )
+                .unwrap(),
+            );
         })
     });

@@ -53,7 +60,7 @@
         b.iter(|| {
             for _ in 0..NUM_DID_CHANGE_ITERATIONS {
                 let _ = black_box(
-                    session::compile(&build_plan, &engines, None, lsp_mode.clone(), experimental)
+                    session::compile(&build_plan, &engines, None, lsp_mode.as_ref(), experimental)
                         .unwrap(),
                 );
             }
diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs
index 67b85630e7b..3e28892d611 100644
--- a/sway-lsp/src/core/session.rs
+++ b/sway-lsp/src/core/session.rs
@@ -12,7 +12,8 @@ use crate::{
     },
     error::{DirectoryError, DocumentError, LanguageServerError},
     traverse::{
-        dependency, lexed_tree, parsed_tree::ParsedTree, typed_tree::TypedTree, ParseContext,
+        dependency, lexed_tree::LexedTree, parsed_tree::ParsedTree, typed_tree::TypedTree,
+        ParseContext,
     },
 };
 use dashmap::DashMap;
@@ -31,6 +32,7 @@ use std::{
     sync::{atomic::AtomicBool, Arc},
     time::SystemTime,
 };
+use sway_ast::{attribute::Annotated, ItemKind};
 use sway_core::{
     decl_engine::DeclEngine,
     language::{
@@ -275,7 +277,7 @@ pub fn compile(
     build_plan: &BuildPlan,
     engines: &Engines,
     retrigger_compilation: Option<Arc<AtomicBool>>,
-    lsp_mode: Option<LspConfig>,
+    lsp_mode: Option<&LspConfig>,
     experimental: sway_core::ExperimentalFlags,
 ) -> Result<Vec<(Option<Programs>, Handler)>, LanguageServerError> {
     let _p = tracing::trace_span!("compile").entered();
@@ -283,7 +285,7 @@ pub fn compile(
         build_plan,
         BuildTarget::default(),
         true,
-        lsp_mode,
+        lsp_mode.cloned(),
         true,
         engines,
         retrigger_compilation,
@@ -298,9 +300,20 @@ pub fn traverse(
     results: Vec<(Option<Programs>, Handler)>,
     engines_clone: &Engines,
     session: Arc<Session>,
+    lsp_mode: Option<&LspConfig>,
 ) -> Result<Option<CompileResults>, LanguageServerError> {
     let _p = tracing::trace_span!("traverse").entered();
-    session.token_map.clear();
+    let modified_file = lsp_mode.and_then(|mode| {
+        mode.file_versions
+            .iter()
+            .find_map(|(path, version)| version.map(|_| path.clone()))
+    });
+    if let Some(path) = &modified_file {
+        session.token_map.remove_tokens_for_file(path);
+    } else {
+        session.token_map.clear();
+    }
+
     session.metrics.clear();
     let mut diagnostics: CompileResults = (Vec::default(), Vec::default());
     let results_len = results.len();
@@ -337,6 +350,14 @@
             let path = engines.se().get_path(source_id);
             let program_id = program_id_from_path(&path, engines)?;
             session.metrics.insert(program_id, metrics);
+
+            if let Some(modified_file) = &modified_file {
+                let modified_program_id = program_id_from_path(modified_file, engines)?;
+                // We can skip traversing the programs for this iteration as they are unchanged.
+                if program_id != modified_program_id {
+                    continue;
+                }
+            }
         }

         // Get a reference to the typed program AST.
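
The hunk above is the core of the incremental behavior: when the LSP knows which file was edited (via `LspConfig::file_versions`), only the program that owns that file is re-traversed, and every other program in `results` keeps its tokens from the previous pass. Condensed into a standalone predicate, the decision looks roughly like this (a sketch only; the helper name is invented, while `program_id_from_path`, `Engines`, and `LanguageServerError` are the real items used in the hunk):

    fn is_unchanged_program(
        program_id: ProgramId,
        modified_file: &Option<PathBuf>,
        engines: &Engines,
    ) -> Result<bool, LanguageServerError> {
        if let Some(path) = modified_file {
            // Tokens for unchanged programs were never removed from the
            // token map, so their traversal can be skipped wholesale.
            return Ok(program_id != program_id_from_path(path, engines)?);
        }
        // No modified-file hint (e.g. a fresh compile): nothing is skippable.
        Ok(false)
    }
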
@@ -356,17 +377,23 @@
         // The final element in the results is the main program.
         if i == results_len - 1 {
             // First, populate our token_map with sway keywords.
-            lexed_tree::parse(&lexed, &ctx);
+            let lexed_tree = LexedTree::new(&ctx);
+            lexed_tree.collect_module_kinds(&lexed);
+            parse_lexed_program(&lexed, &ctx, &modified_file, |an, _ctx| {
+                lexed_tree.traverse_node(an)
+            });

             // Next, populate our token_map with un-typed yet parsed ast nodes.
             let parsed_tree = ParsedTree::new(&ctx);
             parsed_tree.collect_module_spans(&parsed);
-            parse_ast_to_tokens(&parsed, &ctx, |an, _ctx| parsed_tree.traverse_node(an));
+            parse_ast_to_tokens(&parsed, &ctx, &modified_file, |an, _ctx| {
+                parsed_tree.traverse_node(an)
+            });

             // Finally, populate our token_map with typed ast nodes.
             let typed_tree = TypedTree::new(&ctx);
             typed_tree.collect_module_spans(typed_program);
-            parse_ast_to_typed_tokens(typed_program, &ctx, |node, _ctx| {
+            parse_ast_to_typed_tokens(typed_program, &ctx, &modified_file, |node, _ctx| {
                 typed_tree.traverse_node(node);
             });

@@ -376,11 +403,11 @@
             compiled_program.typed = Some(typed_program.clone());
         } else {
             // Collect tokens from dependencies and the standard library prelude.
-            parse_ast_to_tokens(&parsed, &ctx, |an, ctx| {
+            parse_ast_to_tokens(&parsed, &ctx, &modified_file, |an, ctx| {
                 dependency::collect_parsed_declaration(an, ctx);
             });

-            parse_ast_to_typed_tokens(typed_program, &ctx, |node, ctx| {
+            parse_ast_to_typed_tokens(typed_program, &ctx, &modified_file, |node, ctx| {
                 dependency::collect_typed_declaration(node, ctx);
             });
         }
@@ -406,7 +433,7 @@
         &build_plan,
         engines,
         retrigger_compilation,
-        lsp_mode.clone(),
+        lsp_mode.as_ref(),
         experimental,
     )?;

@@ -417,7 +444,7 @@
         return Err(LanguageServerError::ProgramsIsNone);
     }

-    let diagnostics = traverse(results, engines, session.clone())?;
+    let diagnostics = traverse(results, engines, session.clone(), lsp_mode.as_ref())?;
     if let Some(config) = &lsp_mode {
         // Only write the diagnostics results on didSave or didOpen.
         if !config.optimized_build {
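
Throughout the patch, `lsp_mode` moves from pass-by-value (which forced a `clone()` at every call site, as the old bench code shows) to `Option<&LspConfig>`, cloning only at the single boundary that still needs ownership (`pkg::check` inside `compile`). A minimal self-contained sketch of the pattern, with stand-in types:

    #[derive(Clone)]
    struct LspConfig {
        optimized_build: bool, // stand-in for the real fields
    }

    // Borrows, like the new session::traverse.
    fn traverse(_cfg: Option<&LspConfig>) {}

    // Still takes ownership, like pkg::check inside session::compile.
    fn check(_cfg: Option<LspConfig>) {}

    fn compile(cfg: Option<&LspConfig>) {
        // Clone exactly once, at the boundary that needs an owned value.
        check(cfg.cloned());
    }

    fn main() {
        let lsp_mode = Some(LspConfig { optimized_build: false });
        // `as_ref()` lets one owned config serve any number of calls.
        compile(lsp_mode.as_ref());
        traverse(lsp_mode.as_ref());
    }
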
@@ -435,13 +462,60 @@
     Ok(())
 }

+/// Parse the [LexedProgram] to populate the [TokenMap] with lexed nodes.
+pub fn parse_lexed_program(
+    lexed_program: &LexedProgram,
+    ctx: &ParseContext,
+    modified_file: &Option<PathBuf>,
+    f: impl Fn(&Annotated<ItemKind>, &ParseContext) + Sync,
+) {
+    let should_process = |item: &&Annotated<ItemKind>| {
+        modified_file
+            .as_ref()
+            .map(|path| {
+                item.span()
+                    .source_id()
+                    .map_or(false, |id| ctx.engines.se().get_path(id) == *path)
+            })
+            .unwrap_or(true)
+    };
+
+    lexed_program
+        .root
+        .tree
+        .items
+        .iter()
+        .chain(
+            lexed_program
+                .root
+                .submodules_recursive()
+                .flat_map(|(_, submodule)| &submodule.module.tree.items),
+        )
+        .filter(should_process)
+        .collect::<Vec<_>>()
+        .par_iter()
+        .for_each(|item| f(item, ctx));
+}
+
 /// Parse the [ParseProgram] AST to populate the [TokenMap] with parsed AST nodes.
 fn parse_ast_to_tokens(
     parse_program: &ParseProgram,
     ctx: &ParseContext,
+    modified_file: &Option<PathBuf>,
     f: impl Fn(&AstNode, &ParseContext) + Sync,
 ) {
-    let nodes = parse_program
+    let should_process = |node: &&AstNode| {
+        modified_file
+            .as_ref()
+            .map(|path| {
+                node.span
+                    .source_id()
+                    .map_or(false, |id| ctx.engines.se().get_path(id) == *path)
+            })
+            .unwrap_or(true)
+    };
+
+    parse_program
         .root
         .tree
         .root_nodes
@@ -452,17 +526,31 @@ fn parse_ast_to_tokens(
             .submodules_recursive()
             .flat_map(|(_, submodule)| &submodule.module.tree.root_nodes),
         )
-        .collect::<Vec<_>>();
-    nodes.par_iter().for_each(|n| f(n, ctx));
+        .filter(should_process)
+        .collect::<Vec<_>>()
+        .par_iter()
+        .for_each(|n| f(n, ctx));
 }

 /// Parse the [ty::TyProgram] AST to populate the [TokenMap] with typed AST nodes.
 fn parse_ast_to_typed_tokens(
     typed_program: &ty::TyProgram,
     ctx: &ParseContext,
+    modified_file: &Option<PathBuf>,
     f: impl Fn(&ty::TyAstNode, &ParseContext) + Sync,
 ) {
-    let nodes = typed_program
+    let should_process = |node: &&ty::TyAstNode| {
+        modified_file
+            .as_ref()
+            .map(|path| {
+                node.span
+                    .source_id()
+                    .map_or(false, |id| ctx.engines.se().get_path(id) == *path)
+            })
+            .unwrap_or(true)
+    };
+
+    typed_program
         .root
         .all_nodes
         .iter()
@@ -472,8 +560,10 @@
             .submodules_recursive()
             .flat_map(|(_, submodule)| &submodule.module.all_nodes),
         )
-        .collect::<Vec<_>>();
-    nodes.par_iter().for_each(|n| f(n, ctx));
+        .filter(should_process)
+        .collect::<Vec<_>>()
+        .par_iter()
+        .for_each(|n| f(n, ctx));
 }

 /// Create runnables if the `TyProgramKind` of the `TyProgram` is a script.
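
All three traversal helpers above (`parse_lexed_program`, `parse_ast_to_tokens`, `parse_ast_to_typed_tokens`) gate nodes on the same predicate: if a modified file is known, keep only nodes whose span resolves to that file; otherwise process everything. The node types differ, but the logic reduces to the following (a self-contained sketch; the span-to-source-path lookup is abstracted into a plain `Option<&Path>`):

    use std::path::Path;

    fn should_process(node_path: Option<&Path>, modified_file: Option<&Path>) -> bool {
        match modified_file {
            // Keep only nodes whose span resolves to the edited file.
            Some(modified) => node_path.map_or(false, |p| p == modified),
            // No modified-file hint: process every node (full traversal).
            None => true,
        }
    }

    fn main() {
        let edited = Path::new("src/main.sw");
        let other = Path::new("src/lib.sw");
        assert!(should_process(Some(edited), Some(edited))); // node in the edited file
        assert!(!should_process(Some(other), Some(edited))); // unrelated file is skipped
        assert!(should_process(Some(other), None)); // no hint: full traversal
    }
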
diff --git a/sway-lsp/src/core/token_map.rs b/sway-lsp/src/core/token_map.rs
index c827220f8bf..ce410e8723c 100644
--- a/sway-lsp/src/core/token_map.rs
+++ b/sway-lsp/src/core/token_map.rs
@@ -8,7 +8,7 @@ use dashmap::{
     DashMap,
 };
 use lsp_types::{Position, Url};
-use std::{thread, time::Duration};
+use std::{path::PathBuf, thread, time::Duration};
 use sway_core::{engine_threading::SpannedWithEngines, language::ty, type_system::TypeId, Engines};
 use sway_types::Ident;
@@ -235,6 +235,15 @@ impl<'a> TokenMap {
             _ => None,
         })
     }
+
+    /// Remove all tokens for the given file from the token map.
+    pub fn remove_tokens_for_file(&self, path_to_remove: &PathBuf) {
+        self.0.retain(|key, _value| {
+            key.path
+                .as_ref()
+                .map_or(true, |path| path != path_to_remove)
+        });
+    }
 }

 impl std::ops::Deref for TokenMap {
diff --git a/sway-lsp/src/traverse/lexed_tree.rs b/sway-lsp/src/traverse/lexed_tree.rs
index f441321c500..a95f0b55881 100644
--- a/sway-lsp/src/traverse/lexed_tree.rs
+++ b/sway-lsp/src/traverse/lexed_tree.rs
@@ -4,8 +4,8 @@ use crate::{
 };
 use rayon::iter::{ParallelBridge, ParallelIterator};
 use sway_ast::{
-    assignable::ElementAccess, expr::LoopControlFlow, ty::TyTupleDescriptor, Assignable,
-    CodeBlockContents, ConfigurableField, Expr, ExprArrayDescriptor, ExprStructField,
+    assignable::ElementAccess, attribute::Annotated, expr::LoopControlFlow, ty::TyTupleDescriptor,
+    Assignable, CodeBlockContents, ConfigurableField, Expr, ExprArrayDescriptor, ExprStructField,
     ExprTupleDescriptor, FnArg, FnArgs, FnSignature, IfCondition, IfExpr, ItemAbi,
     ItemConfigurable, ItemConst, ItemEnum, ItemFn, ItemImpl, ItemImplItem, ItemKind, ItemStorage,
     ItemStruct, ItemTrait, ItemTypeAlias, ItemUse, MatchBranchKind, ModuleKind, Pattern,
@@ -15,21 +15,28 @@
 use sway_core::language::{lexed::LexedProgram, HasSubmodules};
 use sway_types::{Ident, Span, Spanned};

-pub fn parse(lexed_program: &LexedProgram, ctx: &ParseContext) {
-    insert_module_kind(ctx, &lexed_program.root.tree.kind);
-    adaptive_iter(&lexed_program.root.tree.items, |item| {
-        item.value.parse(ctx);
-    });
-
-    lexed_program
-        .root
-        .submodules_recursive()
-        .for_each(|(_, dep)| {
-            insert_module_kind(ctx, &dep.module.tree.kind);
-            adaptive_iter(&dep.module.tree.items, |item| {
-                item.value.parse(ctx);
+pub struct LexedTree<'a> {
+    ctx: &'a ParseContext<'a>,
+}
+
+impl<'a> LexedTree<'a> {
+    pub fn new(ctx: &'a ParseContext<'a>) -> Self {
+        Self { ctx }
+    }
+
+    pub fn traverse_node(&self, node: &Annotated<ItemKind>) {
+        node.value.parse(self.ctx);
+    }
+
+    pub fn collect_module_kinds(&self, lexed_program: &LexedProgram) {
+        insert_module_kind(self.ctx, &lexed_program.root.tree.kind);
+        lexed_program
+            .root
+            .submodules_recursive()
+            .for_each(|(_, dep)| {
+                insert_module_kind(self.ctx, &dep.module.tree.kind);
             });
-        });
+    }
 }

 fn insert_module_kind(ctx: &ParseContext, kind: &ModuleKind) {
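
With the refactor above, the lexed pass in `traverse` is split in two: `collect_module_kinds` always runs for the root module and every submodule (module kinds are cheap and keyed per module), while per-item parsing goes through the filtered, parallel `parse_lexed_program`. A sketch of the wiring, mirroring the session.rs hunk earlier in this patch (assumes sway-lsp's types are in scope; not a standalone program):

    fn collect_lexed_tokens(
        lexed: &LexedProgram,
        ctx: &ParseContext,
        modified_file: &Option<PathBuf>,
    ) {
        let lexed_tree = LexedTree::new(ctx);
        // Always refresh module kinds, even when only a single file changed.
        lexed_tree.collect_module_kinds(lexed);
        // Parse items in parallel, restricted to the modified file when known.
        parse_lexed_program(lexed, ctx, modified_file, |item, _ctx| {
            lexed_tree.traverse_node(item)
        });
    }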