From 86b8cbb58f060160fed5b6219e60e348a2778212 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Wed, 13 Dec 2023 12:59:09 +1100 Subject: [PATCH 01/40] add Arcs around QE types --- sway-core/src/query_engine/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sway-core/src/query_engine/mod.rs b/sway-core/src/query_engine/mod.rs index efb26c2718c..1a8edab4265 100644 --- a/sway-core/src/query_engine/mod.rs +++ b/sway-core/src/query_engine/mod.rs @@ -45,10 +45,10 @@ pub struct ProgramsCacheEntry { pub type ProgramsCacheMap = HashMap; -#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] pub struct QueryEngine { - parse_module_cache: RwLock, - programs_cache: RwLock, + parse_module_cache: Arc>, + programs_cache: Arc>, } impl QueryEngine { From 6c0668a29da2180259148db70bf65421124a3553 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Wed, 13 Dec 2023 13:00:58 +1100 Subject: [PATCH 02/40] clone engines --- sway-core/src/decl_engine/engine.rs | 23 +++++++++++++++++++++++ sway-core/src/engine_threading.rs | 2 +- sway-core/src/type_system/engine.rs | 13 +++++++++++++ sway-types/src/source_engine.rs | 16 ++++++++++++++++ 4 files changed, 53 insertions(+), 1 deletion(-) diff --git a/sway-core/src/decl_engine/engine.rs b/sway-core/src/decl_engine/engine.rs index fe789780fa8..44321b3d9c3 100644 --- a/sway-core/src/decl_engine/engine.rs +++ b/sway-core/src/decl_engine/engine.rs @@ -34,6 +34,29 @@ pub struct DeclEngine { parents: RwLock>>, } +impl Clone for DeclEngine { + fn clone(&self) -> Self { + let now = std::time::Instant::now(); + let de = DeclEngine { + function_slab: self.function_slab.clone(), + trait_slab: self.trait_slab.clone(), + trait_fn_slab: self.trait_fn_slab.clone(), + trait_type_slab: self.trait_type_slab.clone(), + impl_trait_slab: self.impl_trait_slab.clone(), + struct_slab: self.struct_slab.clone(), + storage_slab: self.storage_slab.clone(), + abi_slab: self.abi_slab.clone(), + constant_slab: self.constant_slab.clone(), + enum_slab: self.enum_slab.clone(), + type_alias_slab: self.type_alias_slab.clone(), + parents: RwLock::new(self.parents.read().expect("Lock is poisoned").clone()), + }; + + eprintln!("DeclEngine clone: {:?}", now.elapsed()); + de + } +} + pub trait DeclEngineGet { fn get(&self, index: &I) -> Arc; } diff --git a/sway-core/src/engine_threading.rs b/sway-core/src/engine_threading.rs index a68158923aa..debab75360e 100644 --- a/sway-core/src/engine_threading.rs +++ b/sway-core/src/engine_threading.rs @@ -6,7 +6,7 @@ use std::{ }; use sway_types::SourceEngine; -#[derive(Debug, Default)] +#[derive(Clone, Debug, Default)] pub struct Engines { type_engine: TypeEngine, decl_engine: DeclEngine, diff --git a/sway-core/src/type_system/engine.rs b/sway-core/src/type_system/engine.rs index abc69cb7e58..19ed784906d 100644 --- a/sway-core/src/type_system/engine.rs +++ b/sway-core/src/type_system/engine.rs @@ -22,6 +22,19 @@ pub struct TypeEngine { id_map: RwLock>, } +impl Clone for TypeEngine { + fn clone(&self) -> Self { + let now = std::time::Instant::now(); + let te = TypeEngine { + slab: self.slab.clone(), + slab_source_ids: self.slab_source_ids.clone(), + id_map: RwLock::new(self.id_map.read().expect("Lock is poisoned").clone()), + }; + eprintln!("TypeEngine clone: {:?}", now.elapsed()); + te + } +} + impl TypeEngine { /// Inserts a [TypeInfo] into the [TypeEngine] and returns a [TypeId] /// referring to that [TypeInfo]. 
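A note on the pattern above (illustrative, not part of the patch): the hand-written Clone impls for DeclEngine and TypeEngine are needed because std::sync::RwLock<T> does not implement Clone, so each lock-protected field has to be read and its contents copied into a fresh lock. A minimal, self-contained sketch of the same pattern, using a hypothetical Engine type in place of the real engines:

    use std::collections::HashMap;
    use std::sync::RwLock;

    // Hypothetical stand-in for the real engines: one lock-protected map plus a plain field.
    #[derive(Debug, Default)]
    pub struct Engine {
        cache: RwLock<HashMap<String, u64>>,
        name: String,
    }

    impl Clone for Engine {
        fn clone(&self) -> Self {
            Engine {
                // RwLock<T> is not Clone: take a read lock and clone the inner data into a
                // new lock, panicking on a poisoned lock just as the patch's impls do.
                cache: RwLock::new(self.cache.read().expect("Lock is poisoned").clone()),
                name: self.name.clone(),
            }
        }
    }

Because every clone is a deep copy of the engine contents, it has a real cost, which is why these early commits wrap the clones in Instant-based timers before later commits in the series remove the timing output.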
diff --git a/sway-types/src/source_engine.rs b/sway-types/src/source_engine.rs index 84b2dff1b30..1cdc3413738 100644 --- a/sway-types/src/source_engine.rs +++ b/sway-types/src/source_engine.rs @@ -24,6 +24,22 @@ pub struct SourceEngine { module_to_sources_map: RwLock>>, } +impl Clone for SourceEngine { + fn clone(&self) -> Self { + let now = std::time::Instant::now(); + let se = SourceEngine { + next_source_id: RwLock::new(*self.next_source_id.read().expect("Lock is poisoned")), + path_to_source_map: RwLock::new(self.path_to_source_map.read().expect("Lock is poisoned").clone()), + source_to_path_map: RwLock::new(self.source_to_path_map.read().expect("Lock is poisoned").clone()), + next_module_id: RwLock::new(*self.next_module_id.read().expect("Lock is poisoned")), + path_to_module_map: RwLock::new(self.path_to_module_map.read().expect("Lock is poisoned").clone()), + module_to_sources_map: RwLock::new(self.module_to_sources_map.read().expect("Lock is poisoned").clone()), + }; + eprintln!("SourceEngine clone: {:?}", now.elapsed()); + se + } +} + impl SourceEngine { /// This function retrieves an integer-based source ID for a provided path buffer. /// If an ID already exists for the given path, the function will return that From e9a33c4c00ce8e83d2487ceb3ba075233ddf014d Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Wed, 13 Dec 2023 13:01:50 +1100 Subject: [PATCH 03/40] pass a cloned engines into the compiler from the server --- sway-lsp/src/server_state.rs | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index dc9668d63ce..9a25a1a3d3b 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -134,7 +134,18 @@ async fn run_blocking_parse_project( } } } - let parse_result = session::parse_project(&uri, &session.engines.read())?; + let now = std::time::Instant::now(); + let engines_clone = session.engines.read().clone(); + eprintln!("parse_project: engines_clone: {:?}", now.elapsed()); + + let now = std::time::Instant::now(); + let parse_result = session::parse_project(&uri, &engines_clone)?; + eprintln!("compilation_took: {:?}", now.elapsed()); + + let now = std::time::Instant::now(); + *session.engines.write() = engines_clone; + eprintln!("parse_project: engines_write: {:?}", now.elapsed()); + let (errors, warnings) = parse_result.diagnostics.clone(); session.write_parse_result(parse_result); *diagnostics = get_diagnostics(&warnings, &errors, session.engines.read().se()); From cf200c396f85d7a44e27f8f7e64bb804d3229459 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Wed, 13 Dec 2023 13:46:08 +1100 Subject: [PATCH 04/40] add cancel compilation atomic --- forc-pkg/src/pkg.rs | 9 +++++++- forc-plugins/forc-doc/src/main.rs | 1 + forc/src/ops/forc_check.rs | 2 +- sway-core/src/ir_generation/const_eval.rs | 1 + sway-core/src/lib.rs | 28 +++++++++++++++++++++++ sway-error/src/handler.rs | 5 ++++ sway-lsp/src/core/session.rs | 10 ++++---- sway-lsp/src/handlers/notification.rs | 5 ++++ sway-lsp/src/server_state.rs | 14 +++++++++--- test/src/ir_generation/mod.rs | 1 + 10 files changed, 67 insertions(+), 9 deletions(-) diff --git a/forc-pkg/src/pkg.rs b/forc-pkg/src/pkg.rs index b8865133fa3..e70404bc866 100644 --- a/forc-pkg/src/pkg.rs +++ b/forc-pkg/src/pkg.rs @@ -23,7 +23,7 @@ use std::{ io::Write, path::{Path, PathBuf}, str::FromStr, - sync::Arc, + sync::{Arc, atomic::AtomicBool}, }; pub use sway_core::Programs; use sway_core::{ @@ -1776,6 +1776,7 @@ pub fn compile( namespace, 
Some(&sway_build_config), &pkg.name, + None, ), Some(sway_build_config.clone()), metrics @@ -2581,6 +2582,7 @@ pub fn check( terse_mode: bool, include_tests: bool, engines: &Engines, + retrigger_compilation: Option>, ) -> anyhow::Result, Handler)>> { let mut lib_namespace_map = Default::default(); let mut source_map = SourceMap::new(); @@ -2627,6 +2629,7 @@ pub fn check( )? .include_tests(include_tests); + let now = std::time::Instant::now(); let input = manifest.entry_string()?; let handler = Handler::default(); let programs_res = sway_core::compile_to_ast( @@ -2636,7 +2639,11 @@ pub fn check( dep_namespace, Some(&build_config), &pkg.name, + retrigger_compilation.clone(), ); + eprintln!("compile_to_ast took: {:?}", now.elapsed()); + + let programs = match programs_res.as_ref() { Ok(programs) => programs, diff --git a/forc-plugins/forc-doc/src/main.rs b/forc-plugins/forc-doc/src/main.rs index 63192c9f67a..413b4fccd2c 100644 --- a/forc-plugins/forc-doc/src/main.rs +++ b/forc-plugins/forc-doc/src/main.rs @@ -218,6 +218,7 @@ pub fn compile_html( build_instructions.silent, tests_enabled, &engines, + None, )?; let raw_docs = if !build_instructions.no_deps { diff --git a/forc/src/ops/forc_check.rs b/forc/src/ops/forc_check.rs index 973d7e86a99..d8805a3c07a 100644 --- a/forc/src/ops/forc_check.rs +++ b/forc/src/ops/forc_check.rs @@ -34,7 +34,7 @@ pub fn check(command: CheckCommand, engines: &Engines) -> Result<(Option, package_name: &str, + retrigger_compilation: Option>, ) -> Result { // Type check the program. let typed_program_opt = ty::TyProgram::type_check( @@ -477,6 +479,8 @@ pub fn parsed_to_ast( package_name, ); + check_should_abort(handler, retrigger_compilation.clone())?; + let mut typed_program = match typed_program_opt { Ok(typed_program) => typed_program, Err(e) => return Err(e), @@ -524,6 +528,9 @@ pub fn parsed_to_ast( ), None => (None, None), }; + + check_should_abort(handler, retrigger_compilation.clone())?; + // Perform control flow analysis and extend with any errors. let _ = perform_control_flow_analysis( handler, @@ -533,6 +540,8 @@ pub fn parsed_to_ast( print_graph_url_format, ); + check_should_abort(handler, retrigger_compilation.clone())?; + // Evaluate const declarations, to allow storage slots initialization with consts. let mut ctx = Context::new(engines.se()); let mut md_mgr = MetadataManager::default(); @@ -596,6 +605,7 @@ pub fn compile_to_ast( initial_namespace: namespace::Module, build_config: Option<&BuildConfig>, package_name: &str, + retrigger_compilation: Option>, ) -> Result { let query_engine = engines.qe(); let mut metrics = PerformanceData::default(); @@ -617,6 +627,8 @@ pub fn compile_to_ast( }; } + check_should_abort(handler, retrigger_compilation.clone())?; + // Parse the program to a concrete syntax tree (CST). 
let parse_program_opt = time_expr!( "parse the program to a concrete syntax tree (CST)", @@ -626,6 +638,8 @@ pub fn compile_to_ast( metrics ); + check_should_abort(handler, retrigger_compilation.clone())?; + let (lexed_program, mut parsed_program) = match parse_program_opt { Ok(modules) => modules, Err(e) => { @@ -653,11 +667,14 @@ pub fn compile_to_ast( initial_namespace, build_config, package_name, + retrigger_compilation.clone(), ), build_config, metrics ); + check_should_abort(handler, retrigger_compilation.clone())?; + handler.dedup(); let programs = Programs::new(lexed_program, parsed_program, typed_res, metrics); @@ -693,6 +710,7 @@ pub fn compile_to_asm( initial_namespace, Some(&build_config), package_name, + None, )?; ast_to_asm(handler, engines, &ast_res, &build_config) } @@ -942,6 +960,16 @@ fn module_return_path_analysis( } } +fn check_should_abort(handler: &Handler, retrigger_compilation: Option>) -> Result<(), ErrorEmitted> { + if let Some(ref retrigger_compilation) = retrigger_compilation { + if retrigger_compilation.load(Ordering::Relaxed) { + return Err(handler.cancel()); + } + } + Ok(()) +} + + #[test] fn test_basic_prog() { let handler = Handler::default(); diff --git a/sway-error/src/handler.rs b/sway-error/src/handler.rs index ebc5111e54a..4420ad6f53f 100644 --- a/sway-error/src/handler.rs +++ b/sway-error/src/handler.rs @@ -34,6 +34,11 @@ impl Handler { ErrorEmitted { _priv: () } } + // todo: decide what to return here + pub fn cancel(&self) -> ErrorEmitted { + ErrorEmitted { _priv: () } + } + /// Emit the warning `warn`. pub fn emit_warn(&self, warn: CompileWarning) { self.inner.borrow_mut().warnings.push(warn); diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index 4abd6290b60..cc2b06d032d 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -25,7 +25,7 @@ use lsp_types::{ use parking_lot::RwLock; use pkg::{manifest::ManifestFile, BuildPlan}; use rayon::iter::{ParallelBridge, ParallelIterator}; -use std::{ops::Deref, path::PathBuf, sync::Arc}; +use std::{ops::Deref, path::PathBuf, sync::{Arc, atomic::AtomicBool}}; use sway_core::{ decl_engine::DeclEngine, language::{ @@ -437,6 +437,7 @@ pub(crate) fn build_plan(uri: &Url) -> Result { pub fn compile( uri: &Url, engines: &Engines, + retrigger_compilation: Option>, ) -> Result, Handler)>, LanguageServerError> { let build_plan = build_plan(uri)?; let tests_enabled = true; @@ -446,6 +447,7 @@ pub fn compile( true, tests_enabled, engines, + retrigger_compilation, ) .map_err(LanguageServerError::FailedToCompile) } @@ -534,8 +536,8 @@ pub fn traverse( } /// Parses the project and returns true if the compiler diagnostics are new and should be published. 
-pub fn parse_project(uri: &Url, engines: &Engines) -> Result { - let results = compile(uri, engines)?; +pub fn parse_project(uri: &Url, engines: &Engines, retrigger_compilation: Option>) -> Result { + let results = compile(uri, engines, retrigger_compilation)?; let TraversalResult { diagnostics, programs, @@ -617,7 +619,7 @@ mod tests { let dir = get_absolute_path("sway-lsp/tests/fixtures"); let uri = get_url(&dir); let engines = Engines::default(); - let result = parse_project(&uri, &engines).expect_err("expected ManifestFileNotFound"); + let result = parse_project(&uri, &engines, None).expect_err("expected ManifestFileNotFound"); assert!(matches!( result, LanguageServerError::DocumentError( diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index a45f9c661a8..0fec656da27 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -1,6 +1,8 @@ //! This module is responsible for implementing handlers for Language Server //! Protocol. This module specifically handles notification messages sent by the Client. +use std::sync::atomic::Ordering; + use crate::{core::document, error::LanguageServerError, server_state::ServerState}; use lsp_types::{ DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, @@ -39,6 +41,9 @@ pub async fn handle_did_change_text_document( session .write_changes_to_file(&uri, params.content_changes) .await?; + if *state.is_compiling.read() { + state.retrigger_compilation.store(true, Ordering::Relaxed); + } state .parse_project( uri, diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 9a25a1a3d3b..7f2b9c55c0c 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -12,7 +12,7 @@ use dashmap::DashMap; use forc_pkg::PackageManifestFile; use lsp_types::{Diagnostic, Url}; use parking_lot::RwLock; -use std::{path::PathBuf, sync::Arc}; +use std::{path::PathBuf, sync::{Arc, atomic::{AtomicBool, Ordering}}}; use tower_lsp::{jsonrpc, Client}; /// `ServerState` is the primary mutable state of the language server @@ -21,6 +21,8 @@ pub struct ServerState { pub(crate) config: Arc>, pub(crate) keyword_docs: Arc, pub(crate) sessions: Arc, + pub(crate) retrigger_compilation: Arc, + pub(crate) is_compiling: RwLock, } impl Default for ServerState { @@ -30,6 +32,8 @@ impl Default for ServerState { config: Arc::new(RwLock::new(Default::default())), keyword_docs: Arc::new(KeywordDocs::new()), sessions: Arc::new(Sessions(DashMap::new())), + retrigger_compilation: Arc::new(AtomicBool::new(false)), + is_compiling: RwLock::new(false), } } } @@ -88,7 +92,8 @@ impl ServerState { version: Option, session: Arc, ) { - match run_blocking_parse_project(uri.clone(), version, session.clone()).await { + *self.is_compiling.write() = true; + match run_blocking_parse_project(uri.clone(), version, session.clone(), Some(self.retrigger_compilation.clone())).await { Ok(_) => { // Note: Even if the computed diagnostics vec is empty, we still have to push the empty Vec // in order to clear former diagnostics. Newly pushed diagnostics always replace previously pushed diagnostics. @@ -108,6 +113,8 @@ impl ServerState { } } } + *self.is_compiling.write() = false; + self.retrigger_compilation.store(false, Ordering::Relaxed); } } @@ -116,6 +123,7 @@ async fn run_blocking_parse_project( uri: Url, version: Option, session: Arc, + retrigger_compilation: Option>, ) -> Result<(), LanguageServerError> { // Acquire a permit to parse the project. 
If there are none available, return false. This way, // we avoid publishing the same diagnostics multiple times. @@ -139,7 +147,7 @@ async fn run_blocking_parse_project( eprintln!("parse_project: engines_clone: {:?}", now.elapsed()); let now = std::time::Instant::now(); - let parse_result = session::parse_project(&uri, &engines_clone)?; + let parse_result = session::parse_project(&uri, &engines_clone, retrigger_compilation)?; eprintln!("compilation_took: {:?}", now.elapsed()); let now = std::time::Instant::now(); diff --git a/test/src/ir_generation/mod.rs b/test/src/ir_generation/mod.rs index 682473d6a3e..d2095370bea 100644 --- a/test/src/ir_generation/mod.rs +++ b/test/src/ir_generation/mod.rs @@ -226,6 +226,7 @@ pub(super) async fn run(filter_regex: Option<®ex::Regex>, verbose: bool) -> R core_lib.clone(), Some(&bld_cfg), "test_lib", + None, ); let (errors, _warnings) = handler.consume(); if !errors.is_empty() { From e30e5f8c459b4ae26f56715f7b75c97650560f7a Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 14 Dec 2023 11:23:54 +1100 Subject: [PATCH 05/40] use crossbeam channel with only the most recent didChange event --- forc-pkg/src/pkg.rs | 4 - sway-core/src/decl_engine/engine.rs | 8 +- sway-core/src/lib.rs | 17 ++-- sway-core/src/type_system/engine.rs | 7 +- sway-lsp/benches/lsp_benchmarks/mod.rs | 2 +- sway-lsp/src/core/session.rs | 5 +- sway-lsp/src/error.rs | 2 + sway-lsp/src/handlers/notification.rs | 42 ++++++++-- sway-lsp/src/server_state.rs | 112 +++++++++++++++++++++++-- sway-lsp/tests/integration/lsp.rs | 3 +- sway-types/src/source_engine.rs | 7 +- 11 files changed, 161 insertions(+), 48 deletions(-) diff --git a/forc-pkg/src/pkg.rs b/forc-pkg/src/pkg.rs index e70404bc866..d50481e3a2f 100644 --- a/forc-pkg/src/pkg.rs +++ b/forc-pkg/src/pkg.rs @@ -2629,7 +2629,6 @@ pub fn check( )? 
.include_tests(include_tests); - let now = std::time::Instant::now(); let input = manifest.entry_string()?; let handler = Handler::default(); let programs_res = sway_core::compile_to_ast( @@ -2641,9 +2640,6 @@ pub fn check( &pkg.name, retrigger_compilation.clone(), ); - eprintln!("compile_to_ast took: {:?}", now.elapsed()); - - let programs = match programs_res.as_ref() { Ok(programs) => programs, diff --git a/sway-core/src/decl_engine/engine.rs b/sway-core/src/decl_engine/engine.rs index 44321b3d9c3..9ffd966b9dc 100644 --- a/sway-core/src/decl_engine/engine.rs +++ b/sway-core/src/decl_engine/engine.rs @@ -36,8 +36,7 @@ pub struct DeclEngine { impl Clone for DeclEngine { fn clone(&self) -> Self { - let now = std::time::Instant::now(); - let de = DeclEngine { + DeclEngine { function_slab: self.function_slab.clone(), trait_slab: self.trait_slab.clone(), trait_fn_slab: self.trait_fn_slab.clone(), @@ -50,10 +49,7 @@ impl Clone for DeclEngine { enum_slab: self.enum_slab.clone(), type_alias_slab: self.type_alias_slab.clone(), parents: RwLock::new(self.parents.read().expect("Lock is poisoned").clone()), - }; - - eprintln!("DeclEngine clone: {:?}", now.elapsed()); - de + } } } diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index b49c6543f45..1950f57f85a 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -479,7 +479,7 @@ pub fn parsed_to_ast( package_name, ); - check_should_abort(handler, retrigger_compilation.clone())?; + check_should_abort(handler, retrigger_compilation.clone(), 482)?; let mut typed_program = match typed_program_opt { Ok(typed_program) => typed_program, @@ -529,7 +529,7 @@ pub fn parsed_to_ast( None => (None, None), }; - check_should_abort(handler, retrigger_compilation.clone())?; + check_should_abort(handler, retrigger_compilation.clone(), 532)?; // Perform control flow analysis and extend with any errors. let _ = perform_control_flow_analysis( @@ -540,7 +540,7 @@ pub fn parsed_to_ast( print_graph_url_format, ); - check_should_abort(handler, retrigger_compilation.clone())?; + check_should_abort(handler, retrigger_compilation.clone(), 543)?; // Evaluate const declarations, to allow storage slots initialization with consts. let mut ctx = Context::new(engines.se()); @@ -610,6 +610,8 @@ pub fn compile_to_ast( let query_engine = engines.qe(); let mut metrics = PerformanceData::default(); + check_should_abort(handler, retrigger_compilation.clone(), 613)?; + if let Some(config) = build_config { let path = config.canonical_root_module(); let include_tests = config.include_tests; @@ -627,7 +629,7 @@ pub fn compile_to_ast( }; } - check_should_abort(handler, retrigger_compilation.clone())?; + check_should_abort(handler, retrigger_compilation.clone(), 632)?; // Parse the program to a concrete syntax tree (CST). 
let parse_program_opt = time_expr!( @@ -638,7 +640,7 @@ pub fn compile_to_ast( metrics ); - check_should_abort(handler, retrigger_compilation.clone())?; + check_should_abort(handler, retrigger_compilation.clone(), 643)?; let (lexed_program, mut parsed_program) = match parse_program_opt { Ok(modules) => modules, @@ -673,7 +675,7 @@ pub fn compile_to_ast( metrics ); - check_should_abort(handler, retrigger_compilation.clone())?; + check_should_abort(handler, retrigger_compilation.clone(), 678)?; handler.dedup(); @@ -960,9 +962,10 @@ fn module_return_path_analysis( } } -fn check_should_abort(handler: &Handler, retrigger_compilation: Option>) -> Result<(), ErrorEmitted> { +fn check_should_abort(handler: &Handler, retrigger_compilation: Option>, line: u32) -> Result<(), ErrorEmitted> { if let Some(ref retrigger_compilation) = retrigger_compilation { if retrigger_compilation.load(Ordering::Relaxed) { + eprintln!("Aborting compilation due to retrigger as line {}.", line); return Err(handler.cancel()); } } diff --git a/sway-core/src/type_system/engine.rs b/sway-core/src/type_system/engine.rs index 19ed784906d..c6049d19b27 100644 --- a/sway-core/src/type_system/engine.rs +++ b/sway-core/src/type_system/engine.rs @@ -24,14 +24,11 @@ pub struct TypeEngine { impl Clone for TypeEngine { fn clone(&self) -> Self { - let now = std::time::Instant::now(); - let te = TypeEngine { + TypeEngine { slab: self.slab.clone(), slab_source_ids: self.slab_source_ids.clone(), id_map: RwLock::new(self.id_map.read().expect("Lock is poisoned").clone()), - }; - eprintln!("TypeEngine clone: {:?}", now.elapsed()); - te + } } } diff --git a/sway-lsp/benches/lsp_benchmarks/mod.rs b/sway-lsp/benches/lsp_benchmarks/mod.rs index f21fb3e8ae2..cf4bf9827f0 100644 --- a/sway-lsp/benches/lsp_benchmarks/mod.rs +++ b/sway-lsp/benches/lsp_benchmarks/mod.rs @@ -12,7 +12,7 @@ pub async fn compile_test_project() -> (Url, Arc) { let uri = Url::from_file_path(benchmark_dir().join("src/main.sw")).unwrap(); session.handle_open_file(&uri).await; // Compile the project and write the parse result to the session - let parse_result = session::parse_project(&uri, &session.engines.read()).unwrap(); + let parse_result = session::parse_project(&uri, &session.engines.read(), None).unwrap(); session.write_parse_result(parse_result); (uri, Arc::new(session)) } diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index cc2b06d032d..cbe98399040 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -413,20 +413,17 @@ pub(crate) fn build_plan(uri: &Url) -> Result { ManifestFile::from_dir(&manifest_dir).map_err(|_| DocumentError::ManifestFileNotFound { dir: uri.path().into(), })?; - let member_manifests = manifest .member_manifests() .map_err(|_| DocumentError::MemberManifestsFailed { dir: uri.path().into(), })?; - let lock_path = manifest .lock_path() .map_err(|_| DocumentError::ManifestsLockPathFailed { dir: uri.path().into(), })?; - // TODO: Either we want LSP to deploy a local node in the background or we want this to // point to Fuel operated IPFS node. 
let ipfs_node = pkg::source::IPFSNode::Local; @@ -544,7 +541,7 @@ pub fn parse_project(uri: &Url, engines: &Engines, retrigger_compilation: Option token_map, metrics, } = traverse(results, engines)?; - let (lexed, parsed, typed) = programs.expect("Programs should be populated at this point."); + let (lexed, parsed, typed) = programs.ok_or(LanguageServerError::ProgramsIsNone)?; Ok(ParseResult { diagnostics, token_map, diff --git a/sway-lsp/src/error.rs b/sway-lsp/src/error.rs index b408d20ee39..c5277210537 100644 --- a/sway-lsp/src/error.rs +++ b/sway-lsp/src/error.rs @@ -20,6 +20,8 @@ pub enum LanguageServerError { FailedToParse, #[error("Error formatting document: {0}")] FormatError(FormatterError), + #[error("No Programs were returned from the compiler")] + ProgramsIsNone, #[error("Unable to acquire a semaphore permit for parsing")] UnableToAcquirePermit, } diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index 0fec656da27..38e8cb708b9 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -33,6 +33,7 @@ pub async fn handle_did_change_text_document( state: &ServerState, params: DidChangeTextDocumentParams, ) -> Result<(), LanguageServerError> { + eprintln!("did change text document: version: {:?}", params.text_document.version); document::mark_file_as_dirty(¶ms.text_document.uri).await?; let (uri, session) = state .sessions @@ -41,17 +42,40 @@ pub async fn handle_did_change_text_document( session .write_changes_to_file(&uri, params.content_changes) .await?; - if *state.is_compiling.read() { + if state.is_compiling.load(Ordering::Relaxed) { + eprintln!("retrigger compilation!"); state.retrigger_compilation.store(true, Ordering::Relaxed); } - state - .parse_project( - uri, - params.text_document.uri, - Some(params.text_document.version), - session.clone(), - ) - .await; + + // let _ = state.watch_tx.as_ref().unwrap().send(crate::server_state::Shared { + // session: Some(session.clone()), + // uri: Some(uri.clone()), + // version: Some(params.text_document.version), + // }); + + if let Some(tx) = state.mpsc_tx.as_ref() { + // If channel is full, remove the old value so the compilation thread only + // gets the latest value. 
+ if tx.is_full() { + eprintln!("channel is full!"); + let _ = state.mpsc_rx.as_ref().unwrap().try_recv(); + } + + let _ = tx.send(crate::server_state::Shared { + session: Some(session.clone()), + uri: Some(uri.clone()), + version: Some(params.text_document.version), + }); + } + + // state + // .parse_project( + // uri, + // params.text_document.uri, + // Some(params.text_document.version), + // session.clone(), + // ) + // .await; Ok(()) } diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 7f2b9c55c0c..952160463c2 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -8,12 +8,15 @@ use crate::{ utils::debug, utils::keyword_docs::KeywordDocs, }; +use crossbeam_channel::{Sender, Receiver}; use dashmap::DashMap; use forc_pkg::PackageManifestFile; use lsp_types::{Diagnostic, Url}; use parking_lot::RwLock; +use sway_core::Engines; use std::{path::PathBuf, sync::{Arc, atomic::{AtomicBool, Ordering}}}; use tower_lsp::{jsonrpc, Client}; +use tokio::sync::watch; /// `ServerState` is the primary mutable state of the language server pub struct ServerState { @@ -22,7 +25,10 @@ pub struct ServerState { pub(crate) keyword_docs: Arc, pub(crate) sessions: Arc, pub(crate) retrigger_compilation: Arc, - pub(crate) is_compiling: RwLock, + pub(crate) is_compiling: Arc, + pub(crate) watch_tx: Option>, + pub(crate) mpsc_tx: Option>, + pub(crate) mpsc_rx: Option>, } impl Default for ServerState { @@ -33,17 +39,111 @@ impl Default for ServerState { keyword_docs: Arc::new(KeywordDocs::new()), sessions: Arc::new(Sessions(DashMap::new())), retrigger_compilation: Arc::new(AtomicBool::new(false)), - is_compiling: RwLock::new(false), + is_compiling: Arc::new(AtomicBool::new(false)), + watch_tx: None, + mpsc_tx: None, + mpsc_rx: None, } } } +#[derive(Debug, Default)] +pub struct Shared { + pub session: Option>, + pub uri: Option, + pub version: Option, +} + +fn reset_compilation_state(is_compiling: Arc, retrigger_compilation: Arc) { + is_compiling.store(false, Ordering::Relaxed); + retrigger_compilation.store(false, Ordering::Relaxed); +} + impl ServerState { pub fn new(client: Client) -> ServerState { - ServerState { + eprintln!("ServerState::new"); + + let (mpsc_tx, mpsc_rx) = crossbeam_channel::bounded(1); + + let (watch_tx, mut watch_rx) = watch::channel(Default::default()); + let state = ServerState { client: Some(client), + watch_tx: Some(watch_tx), + mpsc_tx: Some(mpsc_tx), + mpsc_rx: Some(mpsc_rx.clone()), ..Default::default() - } + }; + + // let is_compiling = state.is_compiling.clone(); + // let retrigger_compilation = state.retrigger_compilation.clone(); + // tokio::spawn(async move { + // eprintln!("spawning compilation thread"); + // while watch_rx.changed().await.is_ok() { + // eprintln!("new compilation request"); + // is_compiling.store(true, Ordering::Relaxed); + + // // let(version, uri, session, engines_clone) = { + // // let shared = watch_rx.borrow(); + // // let version = shared.version.unwrap(); + // // let uri = shared.uri.as_ref().unwrap(); + // // let session = shared.session.as_ref().unwrap(); + // // let engines_clone = session.engines.read().clone(); + // // (version.clone(), uri.clone(), session.clone(), engines_clone) + // // }; + + // let uri = watch_rx.borrow().uri.as_ref().unwrap().clone(); + // let version = watch_rx.borrow().version.unwrap(); + + // eprintln!("starting parsing project: version: {:?}", version); + // let parse_result = match session::parse_project(&uri, &Engines::default(), Some(retrigger_compilation.clone())) { + 
// Ok(parse_result) => parse_result, + // Err(err) => { + // eprintln!("{:?}", err); + // is_compiling.store(false, Ordering::Relaxed); + // retrigger_compilation.store(false, Ordering::Relaxed); + // return; + // }, + // }; + // eprintln!("finished parsing project: version: {:?}", version); + + // //*session.engines.write() = engines_clone; + // //session.write_parse_result(parse_result); + + // is_compiling.store(false, Ordering::Relaxed); + // retrigger_compilation.store(false, Ordering::Relaxed); + // } + // }); + + let is_compiling = state.is_compiling.clone(); + let retrigger_compilation = state.retrigger_compilation.clone(); + std::thread::spawn(move || { + while let Ok(shared) = mpsc_rx.recv() { + eprintln!("new compilation request"); + is_compiling.store(true, Ordering::Relaxed); + + let uri = shared.uri.as_ref().unwrap().clone(); + let version = shared.version.unwrap(); + let session = shared.session.as_ref().unwrap().clone(); + let engines_clone = session.engines.read().clone(); + + eprintln!("starting parsing project: version: {:?}", version); + match session::parse_project(&uri, &engines_clone, Some(retrigger_compilation.clone())) { + Ok(parse_result) => { + *session.engines.write() = engines_clone; + session.write_parse_result(parse_result); + reset_compilation_state(is_compiling.clone(), retrigger_compilation.clone()); + }, + Err(err) => { + eprintln!("{:?}", err); + reset_compilation_state(is_compiling.clone(), retrigger_compilation.clone()); + continue; + }, + } + eprintln!("finished parsing project: version: {:?}", version); + } + }); + + state } pub fn shutdown_server(&self) -> jsonrpc::Result<()> { @@ -92,7 +192,7 @@ impl ServerState { version: Option, session: Arc, ) { - *self.is_compiling.write() = true; + self.is_compiling.store(true, Ordering::Relaxed); match run_blocking_parse_project(uri.clone(), version, session.clone(), Some(self.retrigger_compilation.clone())).await { Ok(_) => { // Note: Even if the computed diagnostics vec is empty, we still have to push the empty Vec @@ -113,7 +213,7 @@ impl ServerState { } } } - *self.is_compiling.write() = false; + self.is_compiling.store(false, Ordering::Relaxed); self.retrigger_compilation.store(false, Ordering::Relaxed); } } diff --git a/sway-lsp/tests/integration/lsp.rs b/sway-lsp/tests/integration/lsp.rs index e6ee62b7404..d56654550b5 100644 --- a/sway-lsp/tests/integration/lsp.rs +++ b/sway-lsp/tests/integration/lsp.rs @@ -91,11 +91,12 @@ pub(crate) async fn did_open_notification( pub(crate) async fn did_change_request( service: &mut LspService, uri: &Url, + version: i32, ) -> Request { let params = json!({ "textDocument": { "uri": uri, - "version": 2 + "version": version, }, "contentChanges": [ { diff --git a/sway-types/src/source_engine.rs b/sway-types/src/source_engine.rs index 1cdc3413738..40810f8152a 100644 --- a/sway-types/src/source_engine.rs +++ b/sway-types/src/source_engine.rs @@ -26,17 +26,14 @@ pub struct SourceEngine { impl Clone for SourceEngine { fn clone(&self) -> Self { - let now = std::time::Instant::now(); - let se = SourceEngine { + SourceEngine { next_source_id: RwLock::new(*self.next_source_id.read().expect("Lock is poisoned")), path_to_source_map: RwLock::new(self.path_to_source_map.read().expect("Lock is poisoned").clone()), source_to_path_map: RwLock::new(self.source_to_path_map.read().expect("Lock is poisoned").clone()), next_module_id: RwLock::new(*self.next_module_id.read().expect("Lock is poisoned")), path_to_module_map: RwLock::new(self.path_to_module_map.read().expect("Lock is 
poisoned").clone()), module_to_sources_map: RwLock::new(self.module_to_sources_map.read().expect("Lock is poisoned").clone()), - }; - eprintln!("SourceEngine clone: {:?}", now.elapsed()); - se + } } } From 7c13863927621acc3984b143b5538f96f9e20542 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 14 Dec 2023 12:16:16 +1100 Subject: [PATCH 06/40] only publish diagnostics on open and save events --- sway-lsp/src/core/session.rs | 7 ++ sway-lsp/src/handlers/notification.rs | 13 +-- sway-lsp/src/server_state.rs | 137 ++++++++++---------------- 3 files changed, 66 insertions(+), 91 deletions(-) diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index cbe98399040..b6837730b61 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -164,6 +164,13 @@ impl Session { self.metrics.insert(*s, t.clone()); }); + let (errors, warnings) = res.diagnostics; + *self.diagnostics.write() = capabilities::diagnostic::get_diagnostics( + &warnings, + &errors, + self.engines.read().se(), + ); + self.create_runnables( &res.typed, self.engines.read().de(), diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index 38e8cb708b9..6eeee59fab9 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -23,8 +23,9 @@ pub async fn handle_did_open_text_document( // as the workspace is already compiled. if session.token_map().is_empty() { state - .parse_project(uri, params.text_document.uri, None, session.clone()) + .parse_project(&uri, ¶ms.text_document.uri, None, session.clone()) .await; + state.publish_diagnostics(uri, params.text_document.uri, session).await; } Ok(()) } @@ -47,12 +48,6 @@ pub async fn handle_did_change_text_document( state.retrigger_compilation.store(true, Ordering::Relaxed); } - // let _ = state.watch_tx.as_ref().unwrap().send(crate::server_state::Shared { - // session: Some(session.clone()), - // uri: Some(uri.clone()), - // version: Some(params.text_document.version), - // }); - if let Some(tx) = state.mpsc_tx.as_ref() { // If channel is full, remove the old value so the compilation thread only // gets the latest value. @@ -90,8 +85,10 @@ pub(crate) async fn handle_did_save_text_document( .await?; session.sync.resync()?; state - .parse_project(uri, params.text_document.uri, None, session.clone()) + .parse_project(&uri, ¶ms.text_document.uri, None, session.clone()) .await; + + state.publish_diagnostics(uri, params.text_document.uri, session).await; Ok(()) } diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 952160463c2..b1a53fbda27 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -1,7 +1,6 @@ //! The context or environment in which the language server functions. 
use crate::{ - capabilities::diagnostic::get_diagnostics, config::{Config, Warnings}, core::session::{self, Session}, error::{DirectoryError, DocumentError, LanguageServerError}, @@ -13,10 +12,8 @@ use dashmap::DashMap; use forc_pkg::PackageManifestFile; use lsp_types::{Diagnostic, Url}; use parking_lot::RwLock; -use sway_core::Engines; use std::{path::PathBuf, sync::{Arc, atomic::{AtomicBool, Ordering}}}; use tower_lsp::{jsonrpc, Client}; -use tokio::sync::watch; /// `ServerState` is the primary mutable state of the language server pub struct ServerState { @@ -26,7 +23,6 @@ pub struct ServerState { pub(crate) sessions: Arc, pub(crate) retrigger_compilation: Arc, pub(crate) is_compiling: Arc, - pub(crate) watch_tx: Option>, pub(crate) mpsc_tx: Option>, pub(crate) mpsc_rx: Option>, } @@ -40,7 +36,6 @@ impl Default for ServerState { sessions: Arc::new(Sessions(DashMap::new())), retrigger_compilation: Arc::new(AtomicBool::new(false)), is_compiling: Arc::new(AtomicBool::new(false)), - watch_tx: None, mpsc_tx: None, mpsc_rx: None, } @@ -64,56 +59,13 @@ impl ServerState { eprintln!("ServerState::new"); let (mpsc_tx, mpsc_rx) = crossbeam_channel::bounded(1); - - let (watch_tx, mut watch_rx) = watch::channel(Default::default()); let state = ServerState { client: Some(client), - watch_tx: Some(watch_tx), mpsc_tx: Some(mpsc_tx), mpsc_rx: Some(mpsc_rx.clone()), ..Default::default() }; - // let is_compiling = state.is_compiling.clone(); - // let retrigger_compilation = state.retrigger_compilation.clone(); - // tokio::spawn(async move { - // eprintln!("spawning compilation thread"); - // while watch_rx.changed().await.is_ok() { - // eprintln!("new compilation request"); - // is_compiling.store(true, Ordering::Relaxed); - - // // let(version, uri, session, engines_clone) = { - // // let shared = watch_rx.borrow(); - // // let version = shared.version.unwrap(); - // // let uri = shared.uri.as_ref().unwrap(); - // // let session = shared.session.as_ref().unwrap(); - // // let engines_clone = session.engines.read().clone(); - // // (version.clone(), uri.clone(), session.clone(), engines_clone) - // // }; - - // let uri = watch_rx.borrow().uri.as_ref().unwrap().clone(); - // let version = watch_rx.borrow().version.unwrap(); - - // eprintln!("starting parsing project: version: {:?}", version); - // let parse_result = match session::parse_project(&uri, &Engines::default(), Some(retrigger_compilation.clone())) { - // Ok(parse_result) => parse_result, - // Err(err) => { - // eprintln!("{:?}", err); - // is_compiling.store(false, Ordering::Relaxed); - // retrigger_compilation.store(false, Ordering::Relaxed); - // return; - // }, - // }; - // eprintln!("finished parsing project: version: {:?}", version); - - // //*session.engines.write() = engines_clone; - // //session.write_parse_result(parse_result); - - // is_compiling.store(false, Ordering::Relaxed); - // retrigger_compilation.store(false, Ordering::Relaxed); - // } - // }); - let is_compiling = state.is_compiling.clone(); let retrigger_compilation = state.retrigger_compilation.clone(); std::thread::spawn(move || { @@ -155,40 +107,10 @@ impl ServerState { Ok(()) } - pub(crate) fn diagnostics(&self, uri: &Url, session: Arc) -> Vec { - let mut diagnostics_to_publish = vec![]; - let config = &self.config.read(); - let tokens = session.token_map().tokens_for_file(uri); - match config.debug.show_collected_tokens_as_warnings { - // If collected_tokens_as_warnings is Parsed or Typed, - // take over the normal error and warning display behavior - // and instead show 
the either the parsed or typed tokens as warnings. - // This is useful for debugging the lsp parser. - Warnings::Parsed => { - diagnostics_to_publish = debug::generate_warnings_for_parsed_tokens(tokens) - } - Warnings::Typed => { - diagnostics_to_publish = debug::generate_warnings_for_typed_tokens(tokens) - } - Warnings::Default => { - let diagnostics_map = session.wait_for_parsing(); - if let Some(diagnostics) = diagnostics_map.get(&PathBuf::from(uri.path())) { - if config.diagnostic.show_warnings { - diagnostics_to_publish.extend(diagnostics.warnings.clone()); - } - if config.diagnostic.show_errors { - diagnostics_to_publish.extend(diagnostics.errors.clone()); - } - } - } - } - diagnostics_to_publish - } - pub(crate) async fn parse_project( &self, - uri: Url, - workspace_uri: Url, + uri: &Url, + workspace_uri: &Url, version: Option, session: Arc, ) { @@ -216,8 +138,59 @@ impl ServerState { self.is_compiling.store(false, Ordering::Relaxed); self.retrigger_compilation.store(false, Ordering::Relaxed); } + + pub(crate) async fn publish_diagnostics( + &self, + uri: Url, + workspace_uri: Url, + session: Arc, + ) { + // Note: Even if the computed diagnostics vec is empty, we still have to push the empty Vec + // in order to clear former diagnostics. Newly pushed diagnostics always replace previously pushed diagnostics. + if let Some(client) = self.client.as_ref() { + client + .publish_diagnostics( + workspace_uri.clone(), + self.diagnostics(&uri, session), + None, + ) + .await; + } + } + + fn diagnostics(&self, uri: &Url, session: Arc) -> Vec { + let mut diagnostics_to_publish = vec![]; + let config = &self.config.read(); + let tokens = session.token_map().tokens_for_file(uri); + match config.debug.show_collected_tokens_as_warnings { + // If collected_tokens_as_warnings is Parsed or Typed, + // take over the normal error and warning display behavior + // and instead show the either the parsed or typed tokens as warnings. + // This is useful for debugging the lsp parser. + Warnings::Parsed => { + diagnostics_to_publish = debug::generate_warnings_for_parsed_tokens(tokens) + } + Warnings::Typed => { + diagnostics_to_publish = debug::generate_warnings_for_typed_tokens(tokens) + } + Warnings::Default => { + let diagnostics_map = session.wait_for_parsing(); + if let Some(diagnostics) = diagnostics_map.get(&PathBuf::from(uri.path())) { + if config.diagnostic.show_warnings { + diagnostics_to_publish.extend(diagnostics.warnings.clone()); + } + if config.diagnostic.show_errors { + diagnostics_to_publish.extend(diagnostics.errors.clone()); + } + } + } + } + diagnostics_to_publish + } } + + /// Runs parse_project in a blocking thread, because parsing is not async. async fn run_blocking_parse_project( uri: Url, @@ -232,7 +205,7 @@ async fn run_blocking_parse_project( } tokio::task::spawn_blocking(move || { // Lock the diagnostics result to prevent multiple threads from parsing the project at the same time. - let mut diagnostics = session.diagnostics.write(); + //let mut diagnostics = session.diagnostics.write(); if let Some(version) = version { // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. 
@@ -254,9 +227,7 @@ async fn run_blocking_parse_project( *session.engines.write() = engines_clone; eprintln!("parse_project: engines_write: {:?}", now.elapsed()); - let (errors, warnings) = parse_result.diagnostics.clone(); session.write_parse_result(parse_result); - *diagnostics = get_diagnostics(&warnings, &errors, session.engines.read().se()); Ok(()) }) .await From 38bedd844cfb474257dc98e979a81b29ef09ed72 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 15 Dec 2023 17:05:07 +1100 Subject: [PATCH 07/40] eow wip --- sway-core/src/lib.rs | 9 +- sway-core/src/query_engine/mod.rs | 2 +- sway-lsp/benches/lsp_benchmarks/compile.rs | 6 +- sway-lsp/src/capabilities/code_actions/mod.rs | 3 +- sway-lsp/src/capabilities/hover/mod.rs | 5 +- sway-lsp/src/capabilities/inlay_hints.rs | 7 +- sway-lsp/src/capabilities/rename.rs | 10 +- sway-lsp/src/core/session.rs | 24 +- sway-lsp/src/handlers/notification.rs | 79 ++++--- sway-lsp/src/handlers/request.rs | 21 +- sway-lsp/src/server_state.rs | 216 ++++++++++-------- sway-lsp/tests/lib.rs | 34 ++- 12 files changed, 220 insertions(+), 196 deletions(-) diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index 1950f57f85a..0ec651bf9bf 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -618,7 +618,7 @@ pub fn compile_to_ast( // Check if we can re-use the data in the cache. if is_parse_module_cache_up_to_date(engines, &path, include_tests) { - let mut entry = query_engine.get_programs_cache_entry(&path).unwrap(); + let mut entry = query_engine.get_programs_cache_entry(&path).expect(&format!("unable to find entry in cache at path {:?}", &path)); entry.programs.metrics.reused_modules += 1; let (warnings, errors) = entry.handler_data; @@ -963,11 +963,16 @@ fn module_return_path_analysis( } fn check_should_abort(handler: &Handler, retrigger_compilation: Option>, line: u32) -> Result<(), ErrorEmitted> { + eprintln!("check_should_abort at line {}.", line); if let Some(ref retrigger_compilation) = retrigger_compilation { - if retrigger_compilation.load(Ordering::Relaxed) { + if retrigger_compilation.load(Ordering::SeqCst) { eprintln!("Aborting compilation due to retrigger as line {}.", line); return Err(handler.cancel()); + } else { + eprintln!("Continuing compilation at line {}.", line); } + } else { + eprintln!("retrigger_compilation is None at line {}.", line); } Ok(()) } diff --git a/sway-core/src/query_engine/mod.rs b/sway-core/src/query_engine/mod.rs index 1a8edab4265..8a0d84f887d 100644 --- a/sway-core/src/query_engine/mod.rs +++ b/sway-core/src/query_engine/mod.rs @@ -67,7 +67,7 @@ impl QueryEngine { } pub fn get_programs_cache_entry(&self, path: &Arc) -> Option { - let cache = self.programs_cache.read().unwrap(); + let cache = self.programs_cache.read().expect("Failed to read programs cache"); cache.get(path).cloned() } diff --git a/sway-lsp/benches/lsp_benchmarks/compile.rs b/sway-lsp/benches/lsp_benchmarks/compile.rs index 8983879302b..6a44c0db6a1 100644 --- a/sway-lsp/benches/lsp_benchmarks/compile.rs +++ b/sway-lsp/benches/lsp_benchmarks/compile.rs @@ -18,13 +18,13 @@ fn benchmarks(c: &mut Criterion) { c.bench_function("compile", |b| { b.iter(|| { let engines = Engines::default(); - let _ = black_box(session::compile(&uri, &engines).unwrap()); + let _ = black_box(session::compile(&uri, &engines, None).unwrap()); }) }); c.bench_function("traverse", |b| { let engines = Engines::default(); - let results = black_box(session::compile(&uri, &engines).unwrap()); + let results = black_box(session::compile(&uri, &engines, None).unwrap()); 
b.iter(|| { let _ = black_box(session::traverse(results.clone(), &engines).unwrap()); }) @@ -33,7 +33,7 @@ fn benchmarks(c: &mut Criterion) { c.bench_function("did_change_with_caching", |b| { b.iter(|| { for _ in 0..NUM_DID_CHANGE_ITERATIONS { - let _ = black_box(session::compile(&uri, &session.engines.read()).unwrap()); + let _ = black_box(session::compile(&uri, &session.engines.read(), None).unwrap()); } }) }); diff --git a/sway-lsp/src/capabilities/code_actions/mod.rs b/sway-lsp/src/capabilities/code_actions/mod.rs index e80e0ddf1e4..cf43e0dd870 100644 --- a/sway-lsp/src/capabilities/code_actions/mod.rs +++ b/sway-lsp/src/capabilities/code_actions/mod.rs @@ -49,13 +49,12 @@ pub fn code_actions( temp_uri: &Url, diagnostics: &Vec, ) -> Option { - let engines = session.engines.read(); let (_, token) = session .token_map() .token_at_position(temp_uri, range.start)?; let ctx = CodeActionContext { - engines: &engines, + engines: &session.engines.read(), tokens: session.token_map(), token: &token, uri, diff --git a/sway-lsp/src/capabilities/hover/mod.rs b/sway-lsp/src/capabilities/hover/mod.rs index 1afc7d44075..f875410745e 100644 --- a/sway-lsp/src/capabilities/hover/mod.rs +++ b/sway-lsp/src/capabilities/hover/mod.rs @@ -50,8 +50,7 @@ pub fn hover_data( }); } - let engines = session.engines.read(); - let (decl_ident, decl_token) = match token.declared_token_ident(&engines) { + let (decl_ident, decl_token) = match token.declared_token_ident(&session.engines.read()) { Some(decl_ident) => { let decl_token = session .token_map() @@ -65,7 +64,7 @@ pub fn hover_data( None => (ident, token), }; - let contents = hover_format(session.clone(), &engines, &decl_token, &decl_ident.name); + let contents = hover_format(session.clone(), &session.engines.read(), &decl_token, &decl_ident.name); Some(lsp_types::Hover { contents, range: Some(range), diff --git a/sway-lsp/src/capabilities/inlay_hints.rs b/sway-lsp/src/capabilities/inlay_hints.rs index 312ea51bdc4..592a0c11c6e 100644 --- a/sway-lsp/src/capabilities/inlay_hints.rs +++ b/sway-lsp/src/capabilities/inlay_hints.rs @@ -38,9 +38,6 @@ pub fn inlay_hints( return None; } - let engines = session.engines.read(); - let type_engine = engines.te(); - let hints: Vec = session .token_map() .tokens_for_file(uri) @@ -63,7 +60,7 @@ pub fn inlay_hints( }) }) .filter_map(|var| { - let type_info = type_engine.get(var.type_ascription.type_id); + let type_info = session.engines.read().te().get(var.type_ascription.type_id); match &*type_info { TypeInfo::Unknown | TypeInfo::UnknownGeneric { .. } => None, _ => Some(var), @@ -72,7 +69,7 @@ pub fn inlay_hints( .map(|var| { let range = get_range_from_span(&var.name.span()); let kind = InlayKind::TypeHint; - let label = format!("{}", engines.help_out(var.type_ascription)); + let label = format!("{}", session.engines.read().help_out(var.type_ascription)); let inlay_hint = InlayHint { range, kind, label }; self::inlay_hint(config.render_colons, inlay_hint) }) diff --git a/sway-lsp/src/capabilities/rename.rs b/sway-lsp/src/capabilities/rename.rs index 3f6f19e8d45..b277b4789fd 100644 --- a/sway-lsp/src/capabilities/rename.rs +++ b/sway-lsp/src/capabilities/rename.rs @@ -48,18 +48,16 @@ pub fn rename( )); } - let engines = session.engines.read(); - // If the token is a function, find the parent declaration // and collect idents for all methods of ABI Decl, Trait Decl, and Impl Trait let map_of_changes: HashMap> = (if token.kind == SymbolKind::Function { - find_all_methods_for_decl(&session, &engines, &url, position)? 
+ find_all_methods_for_decl(&session, &session.engines.read(), &url, position)? } else { // otherwise, just find all references of the token in the token map session .token_map() .iter() - .all_references_of_token(&token, &engines) + .all_references_of_token(&token, &session.engines.read()) .map(|(ident, _)| ident) .collect::>() }) @@ -109,11 +107,9 @@ pub fn prepare_rename( .token_at_position(&url, position) .ok_or(RenameError::TokenNotFound)?; - let engines = session.engines.read(); - // Only let through tokens that are in the users workspace. // tokens that are external to the users workspace cannot be renamed. - let _ = is_token_in_workspace(&session, &engines, &token)?; + let _ = is_token_in_workspace(&session, &session.engines.read(), &token)?; // Make sure we don't allow renaming of tokens that // are keywords or intrinsics. diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index b6837730b61..29016500ea7 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -132,17 +132,12 @@ impl Session { &self.token_map } - /// Wait for the cached [DiagnosticMap] to be unlocked after parsing and return a copy. - pub fn wait_for_parsing(&self) -> DiagnosticMap { - self.diagnostics.read().clone() - } - /// Clean up memory in the [TypeEngine] and [DeclEngine] for the user's workspace. - pub fn garbage_collect(&self) -> Result<(), LanguageServerError> { + pub fn garbage_collect(&self, engines: &mut Engines) -> Result<(), LanguageServerError> { let path = self.sync.temp_dir()?; - let module_id = { self.engines.read().se().get_module_id(&path) }; + let module_id = { engines.se().get_module_id(&path) }; if let Some(module_id) = module_id { - self.engines.write().clear_module(&module_id); + engines.clear_module(&module_id); } Ok(()) } @@ -165,6 +160,7 @@ impl Session { }); let (errors, warnings) = res.diagnostics; + eprintln!("THREAD | success, about to write diagnostics"); *self.diagnostics.write() = capabilities::diagnostic::get_diagnostics( &warnings, &errors, @@ -176,6 +172,7 @@ impl Session { self.engines.read().de(), self.engines.read().se(), ); + eprintln!("THREAD | success, about to write programs"); self.compiled_program.write().lexed = Some(res.lexed); self.compiled_program.write().parsed = Some(res.parsed); self.compiled_program.write().typed = Some(res.typed); @@ -183,11 +180,10 @@ impl Session { pub fn token_ranges(&self, url: &Url, position: Position) -> Option> { let (_, token) = self.token_map.token_at_position(url, position)?; - let engines = self.engines.read(); let mut token_ranges: Vec<_> = self .token_map .tokens_for_file(url) - .all_references_of_token(&token, &engines) + .all_references_of_token(&token, &self.engines.read()) .map(|(ident, _)| ident.range) .collect(); @@ -200,10 +196,9 @@ impl Session { uri: Url, position: Position, ) -> Option { - let engines = self.engines.read(); self.token_map .token_at_position(&uri, position) - .and_then(|(_, token)| token.declared_token_ident(&engines)) + .and_then(|(_, token)| token.declared_token_ident(&self.engines.read())) .and_then(|decl_ident| { decl_ident.path.and_then(|path| { // We use ok() here because we don't care about propagating the error from from_file_path @@ -226,11 +221,10 @@ impl Session { line: position.line, character: position.character - trigger_char.len() as u32 - 1, }; - let engines = self.engines.read(); let (ident_to_complete, _) = self.token_map.token_at_position(uri, shifted_position)?; let fn_tokens = self.token_map - .tokens_at_position(engines.se(), uri, 
shifted_position, Some(true)); + .tokens_at_position(self.engines.read().se(), uri, shifted_position, Some(true)); let (_, fn_token) = fn_tokens.first()?; let compiled_program = &*self.compiled_program.read(); if let Some(TypedAstToken::TypedFunctionDeclaration(fn_decl)) = fn_token.typed.clone() { @@ -542,12 +536,14 @@ pub fn traverse( /// Parses the project and returns true if the compiler diagnostics are new and should be published. pub fn parse_project(uri: &Url, engines: &Engines, retrigger_compilation: Option>) -> Result { let results = compile(uri, engines, retrigger_compilation)?; + eprintln!("compilation successful, starting traversal"); let TraversalResult { diagnostics, programs, token_map, metrics, } = traverse(results, engines)?; + eprintln!("traversal successful"); let (lexed, parsed, typed) = programs.ok_or(LanguageServerError::ProgramsIsNone)?; Ok(ParseResult { diagnostics, diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index 6eeee59fab9..a3eea18f612 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -1,18 +1,19 @@ //! This module is responsible for implementing handlers for Language Server //! Protocol. This module specifically handles notification messages sent by the Client. -use std::sync::atomic::Ordering; +use std::sync::{atomic::Ordering, Arc}; -use crate::{core::document, error::LanguageServerError, server_state::ServerState}; +use crate::{core::{document, session::Session}, error::LanguageServerError, server_state::ServerState}; use lsp_types::{ DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, - DidSaveTextDocumentParams, FileChangeType, + DidSaveTextDocumentParams, FileChangeType, Url, }; pub async fn handle_did_open_text_document( state: &ServerState, params: DidOpenTextDocumentParams, ) -> Result<(), LanguageServerError> { + eprintln!("did_open_text_document"); let (uri, session) = state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) @@ -22,14 +23,43 @@ pub async fn handle_did_open_text_document( // Otherwise, don't recompile the project when a new file in the project is opened // as the workspace is already compiled. if session.token_map().is_empty() { - state - .parse_project(&uri, ¶ms.text_document.uri, None, session.clone()) - .await; + send_new_compilation_request(&state, session.clone(), &uri, None); + + eprintln!("did open - waiting for parsing to finish"); + state.wait_for_parsing().await; state.publish_diagnostics(uri, params.text_document.uri, session).await; } Ok(()) } +fn send_new_compilation_request( + state: &ServerState, + session: Arc, + uri: &Url, + version: Option, +) { + if state.is_compiling.load(Ordering::Relaxed) { + eprintln!("retrigger compilation!"); + state.retrigger_compilation.store(true, Ordering::SeqCst); + } + eprintln!("new compilation request - setting is_compiling to true"); + state.is_compiling.store(true, Ordering::Relaxed); + + // If channel is full, remove the old value so the compilation thread only + // gets the latest value. 
+ if state.mpsc_tx.is_full() { + eprintln!("channel is full!"); + let _ = state.mpsc_rx.try_recv(); + } + + eprintln!("sending new compilation request"); + let _ = state.mpsc_tx.send(crate::server_state::Shared { + session: Some(session.clone()), + uri: Some(uri.clone()), + version, + }); +} + pub async fn handle_did_change_text_document( state: &ServerState, params: DidChangeTextDocumentParams, @@ -43,34 +73,7 @@ pub async fn handle_did_change_text_document( session .write_changes_to_file(&uri, params.content_changes) .await?; - if state.is_compiling.load(Ordering::Relaxed) { - eprintln!("retrigger compilation!"); - state.retrigger_compilation.store(true, Ordering::Relaxed); - } - - if let Some(tx) = state.mpsc_tx.as_ref() { - // If channel is full, remove the old value so the compilation thread only - // gets the latest value. - if tx.is_full() { - eprintln!("channel is full!"); - let _ = state.mpsc_rx.as_ref().unwrap().try_recv(); - } - - let _ = tx.send(crate::server_state::Shared { - session: Some(session.clone()), - uri: Some(uri.clone()), - version: Some(params.text_document.version), - }); - } - - // state - // .parse_project( - // uri, - // params.text_document.uri, - // Some(params.text_document.version), - // session.clone(), - // ) - // .await; + send_new_compilation_request(&state, session.clone(), &uri, Some(params.text_document.version)); Ok(()) } @@ -78,16 +81,16 @@ pub(crate) async fn handle_did_save_text_document( state: &ServerState, params: DidSaveTextDocumentParams, ) -> Result<(), LanguageServerError> { + eprintln!("did save text document"); document::remove_dirty_flag(¶ms.text_document.uri).await?; let (uri, session) = state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) .await?; session.sync.resync()?; - state - .parse_project(&uri, ¶ms.text_document.uri, None, session.clone()) - .await; - + eprintln!("resynced"); + send_new_compilation_request(&state, session.clone(), &uri, None); + state.wait_for_parsing().await; state.publish_diagnostics(uri, params.text_document.uri, session).await; Ok(()) } diff --git a/sway-lsp/src/handlers/request.rs b/sway-lsp/src/handlers/request.rs index b3e08c70a98..c1fc9b64b94 100644 --- a/sway-lsp/src/handlers/request.rs +++ b/sway-lsp/src/handlers/request.rs @@ -52,13 +52,14 @@ pub async fn handle_document_symbol( state: &ServerState, params: lsp_types::DocumentSymbolParams, ) -> Result> { + eprintln!("document_symbol"); + let _ = state.wait_for_parsing().await; match state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) .await { - Ok((uri, session)) => { - let _ = session.wait_for_parsing(); + Ok((uri, session)) => { Ok(session .symbol_information(&uri) .map(DocumentSymbolResponse::Flat)) @@ -74,6 +75,7 @@ pub async fn handle_goto_definition( state: &ServerState, params: lsp_types::GotoDefinitionParams, ) -> Result> { + eprintln!("goto_definition"); match state .sessions .uri_and_session_from_workspace(¶ms.text_document_position_params.text_document.uri) @@ -256,13 +258,14 @@ pub async fn handle_code_lens( state: &ServerState, params: lsp_types::CodeLensParams, ) -> Result>> { + eprintln!("code_lens"); + let _ = state.wait_for_parsing().await; match state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) .await { Ok((url, session)) => { - let _ = session.wait_for_parsing(); Ok(Some(capabilities::code_lens::code_lens(&session, &url))) } Err(err) => { @@ -276,13 +279,14 @@ pub async fn handle_semantic_tokens_full( state: &ServerState, params: SemanticTokensParams, ) -> Result> { + 
eprintln!("semantic_tokens_full"); + let _ = state.wait_for_parsing().await; match state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) .await { Ok((uri, session)) => { - let _ = session.wait_for_parsing(); Ok(capabilities::semantic_tokens::semantic_tokens_full( session, &uri, )) @@ -298,13 +302,14 @@ pub(crate) async fn handle_inlay_hints( state: &ServerState, params: InlayHintParams, ) -> Result>> { + eprintln!("inlay_hints"); + let _ = state.wait_for_parsing().await; match state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) .await { Ok((uri, session)) => { - let _ = session.wait_for_parsing(); let config = &state.config.read().inlay_hints; Ok(capabilities::inlay_hints::inlay_hints( session, @@ -360,8 +365,7 @@ pub async fn handle_show_ast( // Returns true if the current path matches the path of a submodule let path_is_submodule = |ident: &Ident, path: &Option| -> bool { - let engines = session.engines.read(); - ident.span().source_id().map(|p| engines.se().get_path(p)) == *path + ident.span().source_id().map(|p| session.engines.read().se().get_path(p)) == *path }; let ast_path = PathBuf::from(params.save_path.path()); @@ -481,10 +485,9 @@ pub(crate) async fn metrics( .await { Ok((_, session)) => { - let engines = session.engines.read(); let mut metrics = vec![]; for kv in session.metrics.iter() { - let path = engines + let path = session.engines.read() .se() .get_path(kv.key()) .to_string_lossy() diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index b1a53fbda27..1fe3d4d29dd 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -23,22 +23,29 @@ pub struct ServerState { pub(crate) sessions: Arc, pub(crate) retrigger_compilation: Arc, pub(crate) is_compiling: Arc, - pub(crate) mpsc_tx: Option>, - pub(crate) mpsc_rx: Option>, + pub(crate) mpsc_tx: Sender, + pub(crate) mpsc_rx: Arc>, + pub(crate) finished_compilation: Arc, } impl Default for ServerState { fn default() -> Self { - ServerState { + let (mpsc_tx, mpsc_rx) = crossbeam_channel::bounded(1); + + let state = ServerState { client: None, config: Arc::new(RwLock::new(Default::default())), keyword_docs: Arc::new(KeywordDocs::new()), sessions: Arc::new(Sessions(DashMap::new())), retrigger_compilation: Arc::new(AtomicBool::new(false)), is_compiling: Arc::new(AtomicBool::new(false)), - mpsc_tx: None, - mpsc_rx: None, - } + mpsc_tx, + mpsc_rx: Arc::new(mpsc_rx), + finished_compilation: Arc::new(tokio::sync::Notify::new()), + }; + + state.spawn_compilation_thread(); + state } } @@ -49,53 +56,92 @@ pub struct Shared { pub version: Option, } -fn reset_compilation_state(is_compiling: Arc, retrigger_compilation: Arc) { +fn reset_compilation_state( + is_compiling: Arc, + retrigger_compilation: Arc, + finished_compilation: Arc +) { is_compiling.store(false, Ordering::Relaxed); retrigger_compilation.store(false, Ordering::Relaxed); + eprintln!("THREAD | finished compilation, notifying waiters"); + finished_compilation.notify_waiters(); } impl ServerState { pub fn new(client: Client) -> ServerState { eprintln!("ServerState::new"); - - let (mpsc_tx, mpsc_rx) = crossbeam_channel::bounded(1); - let state = ServerState { + ServerState { client: Some(client), - mpsc_tx: Some(mpsc_tx), - mpsc_rx: Some(mpsc_rx.clone()), ..Default::default() - }; + } + } - let is_compiling = state.is_compiling.clone(); - let retrigger_compilation = state.retrigger_compilation.clone(); + pub fn spawn_compilation_thread(&self) { + let is_compiling = self.is_compiling.clone(); + let 
retrigger_compilation = self.retrigger_compilation.clone(); + let finished_compilation = self.finished_compilation.clone(); + let rx = self.mpsc_rx.clone(); std::thread::spawn(move || { - while let Ok(shared) = mpsc_rx.recv() { - eprintln!("new compilation request"); - is_compiling.store(true, Ordering::Relaxed); - + while let Ok(shared) = rx.recv() { + eprintln!("THREAD | received new compilation request"); let uri = shared.uri.as_ref().unwrap().clone(); - let version = shared.version.unwrap(); + let version = shared.version; let session = shared.session.as_ref().unwrap().clone(); - let engines_clone = session.engines.read().clone(); + let mut engines_clone = session.engines.read().clone(); + + // if let Some(version) = version { + // // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. + // if version % 10 == 0 { + // // Call this on the engines clone so we don't clear types that are still in use + // // and might be needed in the case cancel compilation was triggered. + // if let Err(err) = session.garbage_collect(&mut engines_clone) { + // tracing::error!("Unable to perform garbage collection: {}", err.to_string()); + // } + // } + // } - eprintln!("starting parsing project: version: {:?}", version); + eprintln!("THREAD | starting parsing project: version: {:?}", version); match session::parse_project(&uri, &engines_clone, Some(retrigger_compilation.clone())) { Ok(parse_result) => { + eprintln!("THREAD | engines_write: {:?}", version); *session.engines.write() = engines_clone; + eprintln!("THREAD | success, about to write parse results: {:?}", version); session.write_parse_result(parse_result); - reset_compilation_state(is_compiling.clone(), retrigger_compilation.clone()); + reset_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone()); }, Err(err) => { eprintln!("{:?}", err); - reset_compilation_state(is_compiling.clone(), retrigger_compilation.clone()); + reset_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone()); continue; }, } - eprintln!("finished parsing project: version: {:?}", version); + eprintln!("THREAD | finished parsing project: version: {:?}", version); } }); - - state + } + + /// Waits asynchronously for the `is_compiling` flag to become false. + /// + /// This function checks the state of `is_compiling`, and if it's true, + /// it awaits on a notification. Once notified, it checks again, repeating + /// this process until `is_compiling` becomes false. 
+ pub async fn wait_for_parsing(&self) { + loop { + eprintln!("are we still compiling?"); + if !self.is_compiling.load(Ordering::Relaxed) { + eprintln!("compilation is finished, lets check if there are pending compilation requests"); + if self.mpsc_rx.is_empty() { + eprintln!("no pending compilation work, safe to break"); + break; + } else { + eprintln!("there is pending compilation work, lets wait for it to finish"); + } + } else { + eprintln!("we are still compiling, lets wait to be notified"); + } + self.finished_compilation.notified().await; + eprintln!("we were notified, lets check if we are still compiling"); + } } pub fn shutdown_server(&self) -> jsonrpc::Result<()> { @@ -107,58 +153,27 @@ impl ServerState { Ok(()) } - pub(crate) async fn parse_project( - &self, - uri: &Url, - workspace_uri: &Url, - version: Option, - session: Arc, - ) { - self.is_compiling.store(true, Ordering::Relaxed); - match run_blocking_parse_project(uri.clone(), version, session.clone(), Some(self.retrigger_compilation.clone())).await { - Ok(_) => { - // Note: Even if the computed diagnostics vec is empty, we still have to push the empty Vec - // in order to clear former diagnostics. Newly pushed diagnostics always replace previously pushed diagnostics. - if let Some(client) = self.client.as_ref() { - client - .publish_diagnostics( - workspace_uri.clone(), - self.diagnostics(&uri, session), - None, - ) - .await; - } - } - Err(err) => { - if matches!(err, LanguageServerError::FailedToParse) { - tracing::error!("Error parsing project: {:?}", err); - } - } - } - self.is_compiling.store(false, Ordering::Relaxed); - self.retrigger_compilation.store(false, Ordering::Relaxed); - } - pub(crate) async fn publish_diagnostics( &self, uri: Url, workspace_uri: Url, session: Arc, ) { + let diagnostics = self.diagnostics(&uri, session.clone()).await; // Note: Even if the computed diagnostics vec is empty, we still have to push the empty Vec // in order to clear former diagnostics. Newly pushed diagnostics always replace previously pushed diagnostics. if let Some(client) = self.client.as_ref() { client .publish_diagnostics( workspace_uri.clone(), - self.diagnostics(&uri, session), + diagnostics, None, ) .await; } } - fn diagnostics(&self, uri: &Url, session: Arc) -> Vec { + async fn diagnostics(&self, uri: &Url, session: Arc) -> Vec { let mut diagnostics_to_publish = vec![]; let config = &self.config.read(); let tokens = session.token_map().tokens_for_file(uri); @@ -174,8 +189,7 @@ impl ServerState { diagnostics_to_publish = debug::generate_warnings_for_typed_tokens(tokens) } Warnings::Default => { - let diagnostics_map = session.wait_for_parsing(); - if let Some(diagnostics) = diagnostics_map.get(&PathBuf::from(uri.path())) { + if let Some(diagnostics) = session.diagnostics.read().get(&PathBuf::from(uri.path())) { if config.diagnostic.show_warnings { diagnostics_to_publish.extend(diagnostics.warnings.clone()); } @@ -191,48 +205,48 @@ impl ServerState { -/// Runs parse_project in a blocking thread, because parsing is not async. -async fn run_blocking_parse_project( - uri: Url, - version: Option, - session: Arc, - retrigger_compilation: Option>, -) -> Result<(), LanguageServerError> { - // Acquire a permit to parse the project. If there are none available, return false. This way, - // we avoid publishing the same diagnostics multiple times. 
- if session.parse_permits.try_acquire().is_err() { - return Err(LanguageServerError::UnableToAcquirePermit); - } - tokio::task::spawn_blocking(move || { - // Lock the diagnostics result to prevent multiple threads from parsing the project at the same time. - //let mut diagnostics = session.diagnostics.write(); +// /// Runs parse_project in a blocking thread, because parsing is not async. +// async fn run_blocking_parse_project( +// uri: Url, +// version: Option, +// session: Arc, +// retrigger_compilation: Option>, +// ) -> Result<(), LanguageServerError> { +// // Acquire a permit to parse the project. If there are none available, return false. This way, +// // we avoid publishing the same diagnostics multiple times. +// if session.parse_permits.try_acquire().is_err() { +// return Err(LanguageServerError::UnableToAcquirePermit); +// } +// tokio::task::spawn_blocking(move || { +// // Lock the diagnostics result to prevent multiple threads from parsing the project at the same time. +// let _ = session.diagnostics.write(); - if let Some(version) = version { - // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. - if version % 10 == 0 { - if let Err(err) = session.garbage_collect() { - tracing::error!("Unable to perform garbage collection: {}", err.to_string()); - } - } - } - let now = std::time::Instant::now(); - let engines_clone = session.engines.read().clone(); - eprintln!("parse_project: engines_clone: {:?}", now.elapsed()); +// if let Some(version) = version { +// // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. +// if version % 10 == 0 { +// if let Err(err) = session.garbage_collect() { +// tracing::error!("Unable to perform garbage collection: {}", err.to_string()); +// } +// } +// } +// let now = std::time::Instant::now(); +// let engines_clone = session.engines.read().clone(); +// eprintln!("parse_project: engines_clone: {:?}", now.elapsed()); - let now = std::time::Instant::now(); - let parse_result = session::parse_project(&uri, &engines_clone, retrigger_compilation)?; - eprintln!("compilation_took: {:?}", now.elapsed()); +// let now = std::time::Instant::now(); +// let parse_result = session::parse_project(&uri, &engines_clone, retrigger_compilation)?; +// eprintln!("compilation_took: {:?}", now.elapsed()); - let now = std::time::Instant::now(); - *session.engines.write() = engines_clone; - eprintln!("parse_project: engines_write: {:?}", now.elapsed()); +// let now = std::time::Instant::now(); +// *session.engines.write() = engines_clone; +// eprintln!("parse_project: engines_write: {:?}", now.elapsed()); - session.write_parse_result(parse_result); - Ok(()) - }) - .await - .unwrap_or_else(|_| Err(LanguageServerError::FailedToParse)) -} +// session.write_parse_result(parse_result); +// Ok(()) +// }) +// .await +// .unwrap_or_else(|_| Err(LanguageServerError::FailedToParse)) +// } /// `Sessions` is a collection of [Session]s, each of which represents a project /// that has been opened in the users workspace. 
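// NOTE: a minimal, self-contained sketch of the coordination pattern the server_state.rs
// changes above build up: a crossbeam bounded(1) channel that only ever holds the newest
// compilation request, shared AtomicBool flags for `is_compiling`/`retrigger`, and a
// tokio `Notify` that wakes request handlers once the worker goes idle. The names here
// (`Request`, `Worker`, `run_worker`, `wait_for_idle`) are illustrative only and are not
// part of the Sway codebase; the real compile step is elided behind a comment.

use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};

struct Request {
    version: Option<i32>,
}

struct Worker {
    tx: crossbeam_channel::Sender<Request>,
    rx: crossbeam_channel::Receiver<Request>,
    is_compiling: Arc<AtomicBool>,
    retrigger: Arc<AtomicBool>,
    finished: Arc<tokio::sync::Notify>,
}

impl Worker {
    fn new() -> Self {
        // Capacity 1: the channel acts as a "latest request wins" mailbox.
        let (tx, rx) = crossbeam_channel::bounded(1);
        Self {
            tx,
            rx,
            is_compiling: Arc::new(AtomicBool::new(false)),
            retrigger: Arc::new(AtomicBool::new(false)),
            finished: Arc::new(tokio::sync::Notify::new()),
        }
    }

    /// Ask for a (re)compile: cancel any in-flight run and keep only the newest request.
    fn send(&self, req: Request) {
        if self.is_compiling.load(Ordering::SeqCst) {
            // Signal the in-flight compilation to abort at its next cancellation check.
            self.retrigger.store(true, Ordering::SeqCst);
        }
        if self.tx.is_full() {
            // Drop the stale, never-started request so the worker only sees the latest one.
            let _ = self.rx.try_recv();
        }
        let _ = self.tx.send(req);
    }

    /// Dedicated worker thread: compile the latest request, then notify waiters when idle.
    fn run_worker(&self) -> std::thread::JoinHandle<()> {
        let rx = self.rx.clone();
        let is_compiling = self.is_compiling.clone();
        let retrigger = self.retrigger.clone();
        let finished = self.finished.clone();
        std::thread::spawn(move || {
            while let Ok(req) = rx.recv() {
                is_compiling.store(true, Ordering::SeqCst);
                // ... run the compilation for `req.version` here, polling `retrigger`
                // periodically and bailing out early when it flips to true ...
                let _ = req.version;
                retrigger.store(false, Ordering::SeqCst);
                is_compiling.store(false, Ordering::SeqCst);
                if rx.is_empty() {
                    // Only wake waiters when no newer request is already queued.
                    finished.notify_waiters();
                }
            }
        })
    }

    /// Await until the worker is idle and no request is pending.
    async fn wait_for_idle(&self) {
        loop {
            // Register interest before checking the flags, so a `notify_waiters`
            // that fires between the check and the await is not lost.
            let notified = self.finished.notified();
            if !self.is_compiling.load(Ordering::SeqCst) && self.rx.is_empty() {
                break;
            }
            notified.await;
        }
    }
}

// Design note: the bounded(1) mailbox plus the retrigger flag means keystroke bursts
// collapse into a single fresh compilation, while SeqCst ordering keeps the abort flag
// promptly visible to the compiler's cancellation checks on another thread.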
diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index 69ab6ae3070..32ff735578a 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -91,7 +91,7 @@ async fn did_open() { async fn did_change() { let (mut service, _) = LspService::new(ServerState::new); let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await; - let _ = lsp::did_change_request(&mut service, &uri).await; + let _ = lsp::did_change_request(&mut service, &uri, 1).await; shutdown_and_exit(&mut service).await; } @@ -101,7 +101,8 @@ async fn did_cache_test() { .custom_method("sway/metrics", ServerState::metrics) .finish(); let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await; - let _ = lsp::did_change_request(&mut service, &uri).await; + let _ = lsp::did_change_request(&mut service, &uri, 1).await; + service.inner().wait_for_parsing().await; let metrics = lsp::metrics_request(&mut service, &uri).await; assert!(metrics.len() >= 2); for (path, metrics) in metrics { @@ -118,16 +119,30 @@ async fn did_change_stress_test() { let (mut service, _) = LspService::build(ServerState::new) .custom_method("sway/metrics", ServerState::metrics) .finish(); - let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await; - let times = 20; - for _ in 0..times { - let _ = lsp::did_change_request(&mut service, &uri).await; + let bench_dir = sway_workspace_dir().join("sway-lsp/tests/fixtures/benchmark"); + + let uri = init_and_open(&mut service, bench_dir.join("src/main.sw")).await; + let times = 200000; + for version in 0..times { + let _ = lsp::did_change_request(&mut service, &uri, version + 1).await; + if version == 0 { + service.inner().wait_for_parsing().await; + } let metrics = lsp::metrics_request(&mut service, &uri).await; for (path, metrics) in metrics { if path.contains("sway-lib-core") || path.contains("sway-lib-std") { + eprintln!("metrics.reused_modules: {}", metrics.reused_modules); assert!(metrics.reused_modules >= 1); } } + + if rand::random::() < 220 { + let random_duration = rand::random::() as u64 % 80; + std::thread::sleep(std::time::Duration::from_millis(random_duration)); + } else { + let random_duration = rand::random::() % 3000; + std::thread::sleep(std::time::Duration::from_millis(random_duration)); + } } shutdown_and_exit(&mut service).await; } @@ -146,7 +161,8 @@ async fn lsp_syncs_with_workspace_edits() { def_path: uri.as_str(), }; lsp::definition_check(service.inner(), &go_to).await; - let _ = lsp::did_change_request(&mut service, &uri).await; + let _ = lsp::did_change_request(&mut service, &uri, 1).await; + service.inner().wait_for_parsing().await; go_to.def_line = 20; lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 45, 24).await; shutdown_and_exit(&mut service).await; @@ -421,10 +437,6 @@ async fn go_to_definition_for_modules() { }; // mod test_mod; lsp::definition_check(&server, &opt_go_to).await; - - let _ = server.shutdown_server(); - - let server = ServerState::default(); let uri = open( &server, test_fixtures_dir().join("tokens/modules/src/test_mod.sw"), From d71744f9b06549dd2af69a78b7e24008c3be9201 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Wed, 20 Dec 2023 16:37:04 +1100 Subject: [PATCH 08/40] return from check with an error if compilation was cancelled --- forc-pkg/src/pkg.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/forc-pkg/src/pkg.rs b/forc-pkg/src/pkg.rs index d50481e3a2f..7f3a3899387 100644 --- a/forc-pkg/src/pkg.rs +++ b/forc-pkg/src/pkg.rs @@ -2641,6 +2641,14 @@ 
pub fn check( retrigger_compilation.clone(), ); + if retrigger_compilation + .as_ref() + .map(|b| b.load(std::sync::atomic::Ordering::SeqCst)) + .unwrap_or(false) + { + bail!("compilation was retriggered") + } + let programs = match programs_res.as_ref() { Ok(programs) => programs, _ => { From 9cc39165dcaf1f5d6c4a42a7c8b542d3c2780941 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Wed, 20 Dec 2023 16:37:32 +1100 Subject: [PATCH 09/40] remove Arc from QE as it was causing sync issues --- sway-core/src/query_engine/mod.rs | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/sway-core/src/query_engine/mod.rs b/sway-core/src/query_engine/mod.rs index 8a0d84f887d..ab4338df845 100644 --- a/sway-core/src/query_engine/mod.rs +++ b/sway-core/src/query_engine/mod.rs @@ -45,10 +45,19 @@ pub struct ProgramsCacheEntry { pub type ProgramsCacheMap = HashMap; -#[derive(Debug, Default, Clone)] +#[derive(Debug, Default)] pub struct QueryEngine { - parse_module_cache: Arc>, - programs_cache: Arc>, + parse_module_cache: RwLock, + programs_cache: RwLock, +} + +impl Clone for QueryEngine { + fn clone(&self) -> Self { + Self { + parse_module_cache: RwLock::new(self.parse_module_cache.read().unwrap().clone()), + programs_cache: RwLock::new(self.programs_cache.read().unwrap().clone()), + } + } } impl QueryEngine { From b5cceeec24693495859150be30ad6426525f3960 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Wed, 20 Dec 2023 16:39:01 +1100 Subject: [PATCH 10/40] bump tower lsp version --- sway-lsp/Cargo.toml | 4 +++- sway-lsp/tests/utils/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/sway-lsp/Cargo.toml b/sway-lsp/Cargo.toml index fa049a0da79..bdfd49f2b0a 100644 --- a/sway-lsp/Cargo.toml +++ b/sway-lsp/Cargo.toml @@ -10,6 +10,7 @@ repository.workspace = true [dependencies] anyhow = "1.0.41" +crossbeam-channel = "0.5" dashmap = "5.4" fd-lock = "4.0" forc-pkg = { version = "0.48.1", path = "../forc-pkg" } @@ -46,7 +47,7 @@ tokio = { version = "1.3", features = [ "time", ] } toml_edit = "0.19" -tower-lsp = { version = "0.19", features = ["proposed"] } +tower-lsp = { version = "0.20", features = ["proposed"] } tracing = "0.1" urlencoding = "2.1.2" @@ -59,6 +60,7 @@ futures = { version = "0.3", default-features = false, features = [ "async-await", ] } pretty_assertions = "1.4.0" +rand = "0.8" regex = "^1.10.2" sway-lsp-test-utils = { path = "tests/utils" } tikv-jemallocator = "0.5" diff --git a/sway-lsp/tests/utils/Cargo.toml b/sway-lsp/tests/utils/Cargo.toml index 13fd96796ed..b0811cddd92 100644 --- a/sway-lsp/tests/utils/Cargo.toml +++ b/sway-lsp/tests/utils/Cargo.toml @@ -17,4 +17,4 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0.60" tokio = { version = "1.3", features = ["io-std", "io-util", "macros", "net", "rt-multi-thread", "sync", "time"] } tower = { version = "0.4.12", default-features = false, features = ["util"] } -tower-lsp = { version = "0.19", features = ["proposed"] } \ No newline at end of file +tower-lsp = { version = "0.20", features = ["proposed"] } \ No newline at end of file From 02230a5614356cb2d55933359a685c061888cc4d Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Wed, 20 Dec 2023 17:22:43 +1100 Subject: [PATCH 11/40] shutdown compilation thread on exit --- sway-lsp/src/server_state.rs | 51 +++++++++++++++++++++++++----------- 1 file changed, 35 insertions(+), 16 deletions(-) diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 1fe3d4d29dd..2090551480d 100644 --- 
a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -12,7 +12,7 @@ use dashmap::DashMap; use forc_pkg::PackageManifestFile; use lsp_types::{Diagnostic, Url}; use parking_lot::RwLock; -use std::{path::PathBuf, sync::{Arc, atomic::{AtomicBool, Ordering}}}; +use std::{path::PathBuf, sync::{Arc, atomic::{AtomicBool, Ordering}}, thread::JoinHandle}; use tower_lsp::{jsonrpc, Client}; /// `ServerState` is the primary mutable state of the language server @@ -26,6 +26,7 @@ pub struct ServerState { pub(crate) mpsc_tx: Sender, pub(crate) mpsc_rx: Arc>, pub(crate) finished_compilation: Arc, + pub(crate) compilation_thread_join_handle: RwLock>>, } impl Default for ServerState { @@ -42,9 +43,10 @@ impl Default for ServerState { mpsc_tx, mpsc_rx: Arc::new(mpsc_rx), finished_compilation: Arc::new(tokio::sync::Notify::new()), + compilation_thread_join_handle: RwLock::new(None), }; - state.spawn_compilation_thread(); + *state.compilation_thread_join_handle.write() = Some(state.spawn_compilation_thread()); state } } @@ -56,15 +58,24 @@ pub struct Shared { pub version: Option, } -fn reset_compilation_state( +fn update_compilation_state( is_compiling: Arc, retrigger_compilation: Arc, - finished_compilation: Arc + finished_compilation: Arc, + rx: Arc>, ) { - is_compiling.store(false, Ordering::Relaxed); - retrigger_compilation.store(false, Ordering::Relaxed); - eprintln!("THREAD | finished compilation, notifying waiters"); - finished_compilation.notify_waiters(); + eprintln!("THREAD | update_compilation_state"); + retrigger_compilation.store(false, Ordering::SeqCst); + eprintln!("THREAD | retrigger_compilation = {:?}", retrigger_compilation.load(Ordering::SeqCst)); + // Make sure there isn't any pending compilation work + if rx.is_empty() { + eprintln!("THREAD | no pending compilation work, safe to set is_compiling to false"); + is_compiling.store(false, Ordering::SeqCst); + eprintln!("THREAD | finished compilation, notifying waiters"); + finished_compilation.notify_waiters(); + } else { + eprintln!("THREAD | there is pending compilation work"); + } } impl ServerState { @@ -76,7 +87,7 @@ impl ServerState { } } - pub fn spawn_compilation_thread(&self) { + pub fn spawn_compilation_thread(&self) -> JoinHandle<()> { let is_compiling = self.is_compiling.clone(); let retrigger_compilation = self.retrigger_compilation.clone(); let finished_compilation = self.finished_compilation.clone(); @@ -84,6 +95,7 @@ impl ServerState { std::thread::spawn(move || { while let Ok(shared) = rx.recv() { eprintln!("THREAD | received new compilation request"); + let uri = shared.uri.as_ref().unwrap().clone(); let version = shared.version; let session = shared.session.as_ref().unwrap().clone(); @@ -101,23 +113,23 @@ impl ServerState { // } eprintln!("THREAD | starting parsing project: version: {:?}", version); - match session::parse_project(&uri, &engines_clone, Some(retrigger_compilation.clone())) { + match session::parse_project(&uri, version, &engines_clone, Some(retrigger_compilation.clone())) { Ok(parse_result) => { eprintln!("THREAD | engines_write: {:?}", version); *session.engines.write() = engines_clone; eprintln!("THREAD | success, about to write parse results: {:?}", version); session.write_parse_result(parse_result); - reset_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone()); + update_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone(), rx.clone()); }, Err(err) => { - eprintln!("{:?}", err); - 
reset_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone()); + eprintln!("compilation has returned cancelled {:?}", err); + update_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone(), rx.clone()); continue; }, } eprintln!("THREAD | finished parsing project: version: {:?}", version); } - }); + }) } /// Waits asynchronously for the `is_compiling` flag to become false. @@ -127,8 +139,8 @@ impl ServerState { /// this process until `is_compiling` becomes false. pub async fn wait_for_parsing(&self) { loop { - eprintln!("are we still compiling?"); - if !self.is_compiling.load(Ordering::Relaxed) { + eprintln!("are we still compiling? | is_compiling = {:?}", self.is_compiling.load(Ordering::SeqCst)); + if !self.is_compiling.load(Ordering::SeqCst) { eprintln!("compilation is finished, lets check if there are pending compilation requests"); if self.mpsc_rx.is_empty() { eprintln!("no pending compilation work, safe to break"); @@ -146,6 +158,13 @@ impl ServerState { pub fn shutdown_server(&self) -> jsonrpc::Result<()> { tracing::info!("Shutting Down the Sway Language Server"); + + // shutdown the compilation thread + let mut join_handle_option = self.compilation_thread_join_handle.write(); + if let Some(join_handle) = std::mem::take(&mut *join_handle_option) { + let _ = join_handle.join(); + } + let _ = self.sessions.iter().map(|item| { let session = item.value(); session.shutdown(); From 5af9985e488bb2f085f5254611d1b4736a1631b4 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 21 Dec 2023 13:34:17 +1100 Subject: [PATCH 12/40] support partial semantic token responses --- sway-lsp/src/capabilities/semantic_tokens.rs | 19 +++++++++++++- sway-lsp/src/handlers/request.rs | 27 +++++++++++++++++++- sway-lsp/src/lib.rs | 7 +++-- sway-lsp/src/server.rs | 11 ++++++-- 4 files changed, 56 insertions(+), 8 deletions(-) diff --git a/sway-lsp/src/capabilities/semantic_tokens.rs b/sway-lsp/src/capabilities/semantic_tokens.rs index 61e3ce74724..894c9c938bf 100644 --- a/sway-lsp/src/capabilities/semantic_tokens.rs +++ b/sway-lsp/src/capabilities/semantic_tokens.rs @@ -4,7 +4,7 @@ use crate::core::{ }; use lsp_types::{ Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens, - SemanticTokensResult, Url, + SemanticTokensResult, Url, SemanticTokensRangeResult, }; use std::sync::{ atomic::{AtomicU32, Ordering}, @@ -24,6 +24,23 @@ pub fn semantic_tokens_full(session: Arc, url: &Url) -> Option, url: &Url, range: &Range) -> Option { + eprintln!("semantic_tokens_range: range: {:#?}", range); + let mut tokens_sorted: Vec<_> = session.token_map().tokens_for_file(url) + .filter(|t| { + // make sure the tokenident range is within the range that was passed in + let token_range = t.0.range; + token_range.start >= range.start && token_range.end <= range.end + }).collect(); + eprintln!("Number of tokens in range: {}", tokens_sorted.len()); + tokens_sorted.sort_by(|(a_span, _), (b_span, _)| { + let a = (a_span.range.start, a_span.range.end); + let b = (b_span.range.start, b_span.range.end); + a.cmp(&b) + }); + Some(semantic_tokens(&tokens_sorted).into()) +} + //------------------------------- /// Tokens are encoded relative to each other. 
/// diff --git a/sway-lsp/src/handlers/request.rs b/sway-lsp/src/handlers/request.rs index c1fc9b64b94..9b0e1b7f1a4 100644 --- a/sway-lsp/src/handlers/request.rs +++ b/sway-lsp/src/handlers/request.rs @@ -8,7 +8,7 @@ use forc_tracing::{init_tracing_subscriber, TracingSubscriberOptions, TracingWri use lsp_types::{ CodeLens, CompletionResponse, DocumentFormattingParams, DocumentSymbolResponse, InitializeResult, InlayHint, InlayHintParams, PrepareRenameResponse, RenameParams, - SemanticTokensParams, SemanticTokensResult, TextDocumentIdentifier, Url, WorkspaceEdit, + SemanticTokensParams, SemanticTokensResult, TextDocumentIdentifier, Url, WorkspaceEdit, SemanticTokensRangeResult, SemanticTokensRangeParams, }; use std::{ fs::File, @@ -198,6 +198,7 @@ pub async fn handle_document_highlight( state: &ServerState, params: lsp_types::DocumentHighlightParams, ) -> Result>> { + let _ = state.wait_for_parsing().await; match state .sessions .uri_and_session_from_workspace(¶ms.text_document_position_params.text_document.uri) @@ -220,6 +221,7 @@ pub async fn handle_formatting( state: &ServerState, params: DocumentFormattingParams, ) -> Result>> { + let _ = state.wait_for_parsing().await; state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) @@ -275,6 +277,29 @@ pub async fn handle_code_lens( } } +pub async fn handle_semantic_tokens_range( + state: &ServerState, + params: SemanticTokensRangeParams, +) -> Result> { + eprintln!("semantic_tokens_range"); + let _ = state.wait_for_parsing().await; + match state + .sessions + .uri_and_session_from_workspace(¶ms.text_document.uri) + .await + { + Ok((uri, session)) => { + Ok(capabilities::semantic_tokens::semantic_tokens_range( + session, &uri, ¶ms.range, + )) + } + Err(err) => { + tracing::error!("{}", err.to_string()); + Ok(None) + } + } +} + pub async fn handle_semantic_tokens_full( state: &ServerState, params: SemanticTokensParams, diff --git a/sway-lsp/src/lib.rs b/sway-lsp/src/lib.rs index 1d8cbec56e2..a11b4c0a364 100644 --- a/sway-lsp/src/lib.rs +++ b/sway-lsp/src/lib.rs @@ -16,7 +16,7 @@ pub mod utils; use lsp_types::{ CodeActionProviderCapability, CodeLensOptions, CompletionOptions, ExecuteCommandOptions, - HoverProviderCapability, OneOf, RenameOptions, SemanticTokensFullOptions, SemanticTokensLegend, + HoverProviderCapability, OneOf, RenameOptions, SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncKind, WorkDoneProgressOptions, }; @@ -27,7 +27,7 @@ pub async fn start() { let (service, socket) = LspService::build(ServerState::new) .custom_method("sway/show_ast", ServerState::show_ast) .custom_method("sway/visualize", ServerState::visualize) - .custom_method("sway/on_enter", ServerState::on_enter) + // .custom_method("sway/on_enter", ServerState::on_enter) .custom_method("sway/metrics", ServerState::metrics) .finish(); Server::new(tokio::io::stdin(), tokio::io::stdout(), socket) @@ -69,8 +69,7 @@ pub fn server_capabilities() -> ServerCapabilities { token_types: capabilities::semantic_tokens::SUPPORTED_TYPES.to_vec(), token_modifiers: capabilities::semantic_tokens::SUPPORTED_MODIFIERS.to_vec(), }, - full: Some(SemanticTokensFullOptions::Bool(true)), - range: None, + range: Some(true), ..Default::default() } .into(), diff --git a/sway-lsp/src/server.rs b/sway-lsp/src/server.rs index 8705f6e6ee8..dd26faad0b8 100644 --- a/sway-lsp/src/server.rs +++ b/sway-lsp/src/server.rs @@ -15,7 +15,7 @@ use lsp_types::{ DocumentSymbolResponse, GotoDefinitionParams, GotoDefinitionResponse, Hover, 
HoverParams, InitializeParams, InitializeResult, InitializedParams, InlayHint, InlayHintParams, PrepareRenameResponse, RenameParams, SemanticTokensParams, SemanticTokensResult, - TextDocumentIdentifier, TextDocumentPositionParams, TextEdit, WorkspaceEdit, + TextDocumentIdentifier, TextDocumentPositionParams, TextEdit, WorkspaceEdit, SemanticTokensRangeParams, SemanticTokensRangeResult, }; use sway_utils::PerformanceData; use tower_lsp::{jsonrpc::Result, LanguageServer}; @@ -31,7 +31,7 @@ impl LanguageServer for ServerState { } async fn shutdown(&self) -> Result<()> { - self.shutdown_server() + self.shutdown_server().await } async fn did_open(&self, params: DidOpenTextDocumentParams) { @@ -94,6 +94,13 @@ impl LanguageServer for ServerState { request::handle_semantic_tokens_full(self, params).await } + async fn semantic_tokens_range( + &self, + params: SemanticTokensRangeParams, + ) -> Result> { + request::handle_semantic_tokens_range(self, params).await + } + async fn document_highlight( &self, params: DocumentHighlightParams, From b3be9546482084f29dbb3894bb2dfdd1da56fe7c Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 21 Dec 2023 13:35:29 +1100 Subject: [PATCH 13/40] synchronise atomic events --- sway-lsp/src/core/session.rs | 18 +++- sway-lsp/src/handlers/notification.rs | 23 +++-- sway-lsp/src/server_state.rs | 134 ++++++++++++++++---------- 3 files changed, 112 insertions(+), 63 deletions(-) diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index 29016500ea7..2ed2cc37b5b 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -149,6 +149,7 @@ impl Session { self.runnables.clear(); self.metrics.clear(); + eprintln!("THREAD | success, about to token map"); res.token_map.deref().iter().for_each(|item| { let (i, t) = item.pair(); self.token_map.insert(i.clone(), t.clone()); @@ -434,10 +435,14 @@ pub(crate) fn build_plan(uri: &Url) -> Result { pub fn compile( uri: &Url, + version: Option, engines: &Engines, retrigger_compilation: Option>, ) -> Result, Handler)>, LanguageServerError> { + let now = std::time::Instant::now(); + eprintln!("loading build plan for version {:?}", version); let build_plan = build_plan(uri)?; + eprintln!("build plan loaded, about to compile version {:?} | took {:?}", version, now.elapsed()); let tests_enabled = true; pkg::check( &build_plan, @@ -534,9 +539,14 @@ pub fn traverse( } /// Parses the project and returns true if the compiler diagnostics are new and should be published. 
-pub fn parse_project(uri: &Url, engines: &Engines, retrigger_compilation: Option>) -> Result { - let results = compile(uri, engines, retrigger_compilation)?; - eprintln!("compilation successful, starting traversal"); +pub fn parse_project(uri: &Url, version: Option, engines: &Engines, retrigger_compilation: Option>) -> Result { + let results = compile(uri, version, engines, retrigger_compilation)?; + if results.last().is_none() { + eprintln!("compilation failed, returning"); + return Err(LanguageServerError::ProgramsIsNone); + } else { + eprintln!("compilation successful, starting traversal"); + } let TraversalResult { diagnostics, programs, @@ -619,7 +629,7 @@ mod tests { let dir = get_absolute_path("sway-lsp/tests/fixtures"); let uri = get_url(&dir); let engines = Engines::default(); - let result = parse_project(&uri, &engines, None).expect_err("expected ManifestFileNotFound"); + let result = parse_project(&uri, None, &engines, None).expect_err("expected ManifestFileNotFound"); assert!(matches!( result, LanguageServerError::DocumentError( diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index a3eea18f612..5cc29211f84 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -2,8 +2,7 @@ //! Protocol. This module specifically handles notification messages sent by the Client. use std::sync::{atomic::Ordering, Arc}; - -use crate::{core::{document, session::Session}, error::LanguageServerError, server_state::ServerState}; +use crate::{core::{document, session::Session}, error::LanguageServerError, server_state::{ServerState, Shared, ThreadMessage}}; use lsp_types::{ DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, FileChangeType, Url, @@ -14,6 +13,8 @@ pub async fn handle_did_open_text_document( params: DidOpenTextDocumentParams, ) -> Result<(), LanguageServerError> { eprintln!("did_open_text_document"); + + //eprintln!("did_open_text_document"); let (uri, session) = state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) @@ -38,26 +39,26 @@ fn send_new_compilation_request( uri: &Url, version: Option, ) { - if state.is_compiling.load(Ordering::Relaxed) { + eprintln!("new compilation request: version {:?} - setting is_compiling to true", version); + if state.is_compiling.load(Ordering::SeqCst) { eprintln!("retrigger compilation!"); state.retrigger_compilation.store(true, Ordering::SeqCst); } - eprintln!("new compilation request - setting is_compiling to true"); - state.is_compiling.store(true, Ordering::Relaxed); // If channel is full, remove the old value so the compilation thread only // gets the latest value. if state.mpsc_tx.is_full() { - eprintln!("channel is full!"); - let _ = state.mpsc_rx.try_recv(); + if let Ok(ThreadMessage::CompilationData(res)) = state.mpsc_rx.try_recv() { + eprintln!("channel is full! 
discarding version: {:?}", res.version); + } } - eprintln!("sending new compilation request"); - let _ = state.mpsc_tx.send(crate::server_state::Shared { + eprintln!("sending new compilation request: version {:?}", version); + let _ = state.mpsc_tx.send(ThreadMessage::CompilationData(Shared { session: Some(session.clone()), uri: Some(uri.clone()), version, - }); + })); } pub async fn handle_did_change_text_document( @@ -70,9 +71,11 @@ pub async fn handle_did_change_text_document( .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) .await?; + eprintln!("writing changes to file for version: {:?}", params.text_document.version); session .write_changes_to_file(&uri, params.content_changes) .await?; + eprintln!("changes for version {:?} have been written to disk", params.text_document.version); send_new_compilation_request(&state, session.clone(), &uri, Some(params.text_document.version)); Ok(()) } diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 2090551480d..d33b67ff752 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -23,10 +23,17 @@ pub struct ServerState { pub(crate) sessions: Arc, pub(crate) retrigger_compilation: Arc, pub(crate) is_compiling: Arc, - pub(crate) mpsc_tx: Sender, - pub(crate) mpsc_rx: Arc>, + pub(crate) mpsc_tx: Sender, + pub(crate) mpsc_rx: Arc>, pub(crate) finished_compilation: Arc, - pub(crate) compilation_thread_join_handle: RwLock>>, + + pub(crate) last_compilation_state: Arc>, +} + +#[derive(Debug)] +pub enum LastCompilationState { + Success, + Failed, } impl Default for ServerState { @@ -43,14 +50,20 @@ impl Default for ServerState { mpsc_tx, mpsc_rx: Arc::new(mpsc_rx), finished_compilation: Arc::new(tokio::sync::Notify::new()), - compilation_thread_join_handle: RwLock::new(None), + last_compilation_state: Arc::new(RwLock::new(LastCompilationState::Success)), }; - *state.compilation_thread_join_handle.write() = Some(state.spawn_compilation_thread()); + state.spawn_compilation_thread(); state } } +#[derive(Debug)] +pub enum ThreadMessage { + CompilationData(Shared), + Terminate, +} + #[derive(Debug, Default)] pub struct Shared { pub session: Option>, @@ -62,15 +75,20 @@ fn update_compilation_state( is_compiling: Arc, retrigger_compilation: Arc, finished_compilation: Arc, - rx: Arc>, + rx: Arc>, ) { eprintln!("THREAD | update_compilation_state"); + + is_compiling.store(false, Ordering::SeqCst); + eprintln!("THREAD | is_compiling = {:?}", is_compiling.load(Ordering::SeqCst)); + retrigger_compilation.store(false, Ordering::SeqCst); eprintln!("THREAD | retrigger_compilation = {:?}", retrigger_compilation.load(Ordering::SeqCst)); + // Make sure there isn't any pending compilation work if rx.is_empty() { - eprintln!("THREAD | no pending compilation work, safe to set is_compiling to false"); - is_compiling.store(false, Ordering::SeqCst); + //eprintln!("THREAD | no pending compilation work, safe to set is_compiling to false"); + eprintln!("THREAD | finished compilation, notifying waiters"); finished_compilation.notify_waiters(); } else { @@ -87,49 +105,61 @@ impl ServerState { } } - pub fn spawn_compilation_thread(&self) -> JoinHandle<()> { + pub fn spawn_compilation_thread(&self) { let is_compiling = self.is_compiling.clone(); let retrigger_compilation = self.retrigger_compilation.clone(); let finished_compilation = self.finished_compilation.clone(); let rx = self.mpsc_rx.clone(); + let last_compilation_state = self.last_compilation_state.clone(); std::thread::spawn(move || { - while let Ok(shared) = 
rx.recv() { - eprintln!("THREAD | received new compilation request"); - - let uri = shared.uri.as_ref().unwrap().clone(); - let version = shared.version; - let session = shared.session.as_ref().unwrap().clone(); - let mut engines_clone = session.engines.read().clone(); - - // if let Some(version) = version { - // // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. - // if version % 10 == 0 { - // // Call this on the engines clone so we don't clear types that are still in use - // // and might be needed in the case cancel compilation was triggered. - // if let Err(err) = session.garbage_collect(&mut engines_clone) { - // tracing::error!("Unable to perform garbage collection: {}", err.to_string()); - // } - // } - // } - - eprintln!("THREAD | starting parsing project: version: {:?}", version); - match session::parse_project(&uri, version, &engines_clone, Some(retrigger_compilation.clone())) { - Ok(parse_result) => { - eprintln!("THREAD | engines_write: {:?}", version); - *session.engines.write() = engines_clone; - eprintln!("THREAD | success, about to write parse results: {:?}", version); - session.write_parse_result(parse_result); - update_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone(), rx.clone()); - }, - Err(err) => { - eprintln!("compilation has returned cancelled {:?}", err); - update_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone(), rx.clone()); - continue; - }, + while let Ok(msg) = rx.recv() { + match msg { + ThreadMessage::CompilationData(shared) => { + eprintln!("THREAD | received new compilation request"); + + let uri = shared.uri.as_ref().unwrap().clone(); + let version = shared.version; + let session = shared.session.as_ref().unwrap().clone(); + let mut engines_clone = session.engines.read().clone(); + + // if let Some(version) = version { + // // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. + // if version % 10 == 0 { + // // Call this on the engines clone so we don't clear types that are still in use + // // and might be needed in the case cancel compilation was triggered. 
+ // if let Err(err) = session.garbage_collect(&mut engines_clone) { + // tracing::error!("Unable to perform garbage collection: {}", err.to_string()); + // } + // } + // } + is_compiling.store(true, Ordering::SeqCst); + eprintln!("THREAD | starting parsing project: version: {:?}", version); + match session::parse_project(&uri, version, &engines_clone, Some(retrigger_compilation.clone())) { + Ok(parse_result) => { + eprintln!("THREAD | engines_write: {:?}", version); + *session.engines.write() = engines_clone; + eprintln!("THREAD | success, about to write parse results: {:?}", version); + session.write_parse_result(parse_result); + eprintln!("THREAD | finished writing parse results: {:?}", version); + update_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone(), rx.clone()); + *last_compilation_state.write() = LastCompilationState::Success; + }, + Err(err) => { + eprintln!("compilation has returned cancelled {:?}", err); + update_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone(), rx.clone()); + *last_compilation_state.write() = LastCompilationState::Failed; + continue; + }, + } + eprintln!("THREAD | finished parsing project: version: {:?}", version); + } + ThreadMessage::Terminate => { + eprintln!("THREAD | received terminate message"); + return; + } } - eprintln!("THREAD | finished parsing project: version: {:?}", version); } - }) + }); } /// Waits asynchronously for the `is_compiling` flag to become false. @@ -144,6 +174,7 @@ impl ServerState { eprintln!("compilation is finished, lets check if there are pending compilation requests"); if self.mpsc_rx.is_empty() { eprintln!("no pending compilation work, safe to break"); + eprintln!("And the last compilation state was: {:?}", &self.last_compilation_state.read()); break; } else { eprintln!("there is pending compilation work, lets wait for it to finish"); @@ -156,15 +187,20 @@ impl ServerState { } } - pub fn shutdown_server(&self) -> jsonrpc::Result<()> { + pub async fn shutdown_server(&self) -> jsonrpc::Result<()> { tracing::info!("Shutting Down the Sway Language Server"); - // shutdown the compilation thread - let mut join_handle_option = self.compilation_thread_join_handle.write(); - if let Some(join_handle) = std::mem::take(&mut *join_handle_option) { - let _ = join_handle.join(); + // set the retrigger_compilation flag to true so that the compilation exit early + while let Ok(_) = self.mpsc_rx.try_recv() { + eprintln!("draining pending compilation requests"); } + self.retrigger_compilation.store(true, Ordering::SeqCst); + self.wait_for_parsing().await; + + eprintln!("sending terminate message"); + self.mpsc_tx.send(ThreadMessage::Terminate).expect("failed to send terminate message"); + eprintln!("shutting down the sessions"); let _ = self.sessions.iter().map(|item| { let session = item.value(); session.shutdown(); From b9de23d74d30129a2a8c526f91b567842162e6c7 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 21 Dec 2023 13:36:09 +1100 Subject: [PATCH 14/40] mahe shutdown server async --- sway-lsp/tests/lib.rs | 70 ++++++++++++++++++++++--------------------- 1 file changed, 36 insertions(+), 34 deletions(-) diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index 32ff735578a..bece8de531b 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -84,7 +84,7 @@ async fn initialize() { async fn did_open() { let server = ServerState::default(); let _ = open(&server, e2e_test_dir().join("src/main.sw")).await; - let _ = 
server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -113,7 +113,7 @@ async fn did_cache_test() { shutdown_and_exit(&mut service).await; } -// #[tokio::test] +#[tokio::test] #[allow(dead_code)] async fn did_change_stress_test() { let (mut service, _) = LspService::build(ServerState::new) @@ -121,8 +121,9 @@ async fn did_change_stress_test() { .finish(); let bench_dir = sway_workspace_dir().join("sway-lsp/tests/fixtures/benchmark"); + let now = std::time::Instant::now(); let uri = init_and_open(&mut service, bench_dir.join("src/main.sw")).await; - let times = 200000; + let times = 400; for version in 0..times { let _ = lsp::did_change_request(&mut service, &uri, version + 1).await; if version == 0 { @@ -131,20 +132,21 @@ async fn did_change_stress_test() { let metrics = lsp::metrics_request(&mut service, &uri).await; for (path, metrics) in metrics { if path.contains("sway-lib-core") || path.contains("sway-lib-std") { - eprintln!("metrics.reused_modules: {}", metrics.reused_modules); assert!(metrics.reused_modules >= 1); } } - if rand::random::() < 220 { - let random_duration = rand::random::() as u64 % 80; - std::thread::sleep(std::time::Duration::from_millis(random_duration)); - } else { - let random_duration = rand::random::() % 3000; - std::thread::sleep(std::time::Duration::from_millis(random_duration)); - } + // if rand::random::() < 220 { + // let random_duration = rand::random::() as u64 % 80; + // std::thread::sleep(std::time::Duration::from_millis(random_duration)); + // } else { + // let random_duration = rand::random::() % 3000; + // std::thread::sleep(std::time::Duration::from_millis(random_duration)); + // } } + eprintln!("SHUTTING DOWN!..."); shutdown_and_exit(&mut service).await; + eprintln!("did_change_stress_test took: {:?}", now.elapsed()); } #[tokio::test] @@ -173,7 +175,7 @@ async fn show_ast() { let server = ServerState::default(); let uri = open(&server, e2e_test_dir().join("src/main.sw")).await; lsp::show_ast_request(&server, &uri, "typed", None).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -181,7 +183,7 @@ async fn visualize() { let server = ServerState::default(); let uri = open(&server, e2e_test_dir().join("src/main.sw")).await; lsp::visualize_request(&server, &uri, "build_plan").await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } //------------------- GO TO DEFINITION -------------------// @@ -200,7 +202,7 @@ async fn go_to_definition() { def_path: uri.as_str(), }; lsp::definition_check(&server, &go_to).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -254,7 +256,7 @@ async fn go_to_definition_for_fields() { // Foo lsp::definition_check(&server, &opt_go_to).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -302,7 +304,7 @@ async fn go_to_definition_inside_turbofish() { lsp::definition_check_with_req_offset(&server, &mut res_go_to, 23, 27).await; lsp::definition_check_with_req_offset(&server, &mut res_go_to, 24, 33).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -414,7 +416,7 @@ async fn go_to_definition_for_matches() { // ExampleStruct.variable lsp::definition_check(&server, &go_to).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -455,7 +457,7 @@ async fn go_to_definition_for_modules() { // mod deep_mod; 
lsp::definition_check(&server, &opt_go_to).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -827,7 +829,7 @@ async fn go_to_definition_for_paths() { // dfun // lsp::definition_check(&server, &go_to).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -856,7 +858,7 @@ async fn go_to_definition_for_traits() { trait_go_to.req_char = 20; trait_go_to.def_line = 3; lsp::definition_check(&server, &trait_go_to).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -947,7 +949,7 @@ async fn go_to_definition_for_variables() { lsp::definition_check_with_req_offset(&server, &mut go_to, 60, 50).await; lsp::definition_check_with_req_offset(&server, &mut go_to, 61, 50).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1377,7 +1379,7 @@ async fn go_to_definition_for_storage() { // storage.var1.z.x lsp::definition_check(&server, &go_to).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } //------------------- HOVER DOCUMENTATION -------------------// @@ -1402,7 +1404,7 @@ async fn hover_docs_for_consts() { hover.req_char = 49; hover.documentation = vec![" CONSTANT_2 has a value of 200"]; lsp::hover_request(&server, &hover).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1421,7 +1423,7 @@ async fn hover_docs_for_functions() { documentation: vec!["```sway\npub fn bar(p: Point) -> Point\n```\n---\n A function declaration with struct as a function parameter\n\n---\nGo to [Point](command:sway.goToLocation?%5B%7B%22range%22%3A%7B%22end%22%3A%7B%22character%22%3A1%2C%22line%22%3A5%7D%2C%22start%22%3A%7B%22character%22%3A0%2C%22line%22%3A2%7D%7D%2C%22uri%22%3A%22file","sway%2Fsway-lsp%2Ftests%2Ffixtures%2Ftokens%2Ffunctions%2Fsrc%2Fmain.sw%22%7D%5D \"functions::Point\")"], }; lsp::hover_request(&server, &hover).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1458,7 +1460,7 @@ async fn hover_docs_for_structs() { documentation: vec!["```sway\nstruct MyStruct\n```\n---\n My struct type"], }; lsp::hover_request(&server, &hover).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1485,7 +1487,7 @@ async fn hover_docs_for_enums() { hover.req_char = 29; hover.documentation = vec![" Docs for variants"]; lsp::hover_request(&server, &hover).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1500,7 +1502,7 @@ async fn hover_docs_for_abis() { documentation: vec!["```sway\nabi MyContract\n```\n---\n Docs for MyContract"], }; lsp::hover_request(&server, &hover).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1519,7 +1521,7 @@ async fn hover_docs_for_variables() { documentation: vec!["```sway\nlet variable8: ContractCaller\n```\n---"], }; lsp::hover_request(&server, &hover).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1534,7 +1536,7 @@ async fn hover_docs_with_code_examples() { documentation: vec!["```sway\nstruct Data\n```\n---\n Struct holding:\n\n 1. A `value` of type `NumberOrString`\n 2. 
An `address` of type `u64`"], }; lsp::hover_request(&server, &hover).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1553,7 +1555,7 @@ async fn hover_docs_for_self_keywords() { hover.req_char = 24; hover.documentation = vec!["```sway\nstruct MyStruct\n```\n---\n\n---\n[2 implementations](command:sway.peekLocations?%5B%7B%22locations%22%3A%5B%7B%22range%22%3A%7B%22end%22%3A%7B%22character%22%3A1%2C%22line%22%3A4%7D%2C%22start%22%3A%7B%22character%22%3A0%2C%22line%22%3A2%7D%7D%2C%22uri%22%3A%22file","sway%2Fsway-lsp%2Ftests%2Ffixtures%2Fcompletion%2Fsrc%2Fmain.sw%22%7D%2C%7B%22range%22%3A%7B%22end%22%3A%7B%22character%22%3A1%2C%22line%22%3A14%7D%2C%22start%22%3A%7B%22character%22%3A0%2C%22line%22%3A6%7D%7D%2C%22uri%22%3A%22file","sway%2Fsway-lsp%2Ftests%2Ffixtures%2Fcompletion%2Fsrc%2Fmain.sw%22%7D%5D%7D%5D \"Go to implementations\")"]; lsp::hover_request(&server, &hover).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1577,7 +1579,7 @@ async fn hover_docs_for_boolean_keywords() { hover.req_char = 31; hover.documentation = vec!["\n```sway\ntrue\n```\n\n---\n\n A value of type [`bool`] representing logical **true**.\n\n Logically `true` is not equal to [`false`].\n\n ## Control structures that check for **true**\n\n Several of Sway's control structures will check for a `bool` condition evaluating to **true**.\n\n * The condition in an [`if`] expression must be of type `bool`.\n Whenever that condition evaluates to **true**, the `if` expression takes\n on the value of the first block. If however, the condition evaluates\n to `false`, the expression takes on value of the `else` block if there is one.\n\n * [`while`] is another control flow construct expecting a `bool`-typed condition.\n As long as the condition evaluates to **true**, the `while` loop will continually\n evaluate its associated block.\n\n * [`match`] arms can have guard clauses on them."]; lsp::hover_request(&server, &hover).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1692,7 +1694,7 @@ async fn rename() { new_name: "NEW_TYPE_NAME", // from ZERO_B256 }; assert_eq!(lsp::prepare_rename_request(&server, &rename).await, None); - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } #[tokio::test] @@ -1741,7 +1743,7 @@ macro_rules! test_lsp_capability { // Call the specific LSP capability function that was passed in. 
let _ = $capability(&server, &uri).await; - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; }}; } @@ -1900,5 +1902,5 @@ async fn write_all_example_asts() { lsp::show_ast_request(&server, &uri, "typed", example_dir).await; } } - let _ = server.shutdown_server(); + let _ = server.shutdown_server().await; } From cac6a639441370454ebd9d44bbc4b02e3f656b0f Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 21 Dec 2023 13:36:51 +1100 Subject: [PATCH 15/40] compilation checks --- sway-core/src/lib.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index 0ec651bf9bf..0c9acf57f93 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -607,11 +607,10 @@ pub fn compile_to_ast( package_name: &str, retrigger_compilation: Option>, ) -> Result { + check_should_abort(handler, retrigger_compilation.clone(), 610)?; let query_engine = engines.qe(); let mut metrics = PerformanceData::default(); - check_should_abort(handler, retrigger_compilation.clone(), 613)?; - if let Some(config) = build_config { let path = config.canonical_root_module(); let include_tests = config.include_tests; @@ -625,6 +624,7 @@ pub fn compile_to_ast( let new_handler = Handler::from_parts(warnings, errors); handler.append(new_handler); + eprintln!("re-using cached prgram data, returning from compilation"); return Ok(entry.programs); }; } @@ -963,13 +963,13 @@ fn module_return_path_analysis( } fn check_should_abort(handler: &Handler, retrigger_compilation: Option>, line: u32) -> Result<(), ErrorEmitted> { - eprintln!("check_should_abort at line {}.", line); + //eprintln!("check_should_abort at line {}.", line); if let Some(ref retrigger_compilation) = retrigger_compilation { if retrigger_compilation.load(Ordering::SeqCst) { eprintln!("Aborting compilation due to retrigger as line {}.", line); return Err(handler.cancel()); } else { - eprintln!("Continuing compilation at line {}.", line); + //eprintln!("Continuing compilation at line {}.", line); } } else { eprintln!("retrigger_compilation is None at line {}.", line); From 48b15ebc257691babb9f7c65ddeca0e08a84dc4e Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 21 Dec 2023 15:42:04 +1100 Subject: [PATCH 16/40] fix compilation benchmarks --- sway-lsp/benches/lsp_benchmarks/compile.rs | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/sway-lsp/benches/lsp_benchmarks/compile.rs b/sway-lsp/benches/lsp_benchmarks/compile.rs index 6a44c0db6a1..0c3c7d9084e 100644 --- a/sway-lsp/benches/lsp_benchmarks/compile.rs +++ b/sway-lsp/benches/lsp_benchmarks/compile.rs @@ -1,39 +1,33 @@ use criterion::{black_box, criterion_group, Criterion}; use lsp_types::Url; use sway_core::Engines; -use sway_lsp::core::session::{self, Session}; -use tokio::runtime::Runtime; +use sway_lsp::core::session; -const NUM_DID_CHANGE_ITERATIONS: usize = 4; +const NUM_DID_CHANGE_ITERATIONS: usize = 10; fn benchmarks(c: &mut Criterion) { // Load the test project let uri = Url::from_file_path(super::benchmark_dir().join("src/main.sw")).unwrap(); - let session = Runtime::new().unwrap().block_on(async { - let session = Session::new(); - session.handle_open_file(&uri).await; - session - }); - c.bench_function("compile", |b| { b.iter(|| { let engines = Engines::default(); - let _ = black_box(session::compile(&uri, &engines, None).unwrap()); + let _ = black_box(session::compile(&uri, None, &engines, None).unwrap()); }) }); c.bench_function("traverse", |b| { let engines = 
Engines::default(); - let results = black_box(session::compile(&uri, &engines, None).unwrap()); + let results = black_box(session::compile(&uri, None, &engines, None).unwrap()); b.iter(|| { let _ = black_box(session::traverse(results.clone(), &engines).unwrap()); }) }); c.bench_function("did_change_with_caching", |b| { + let engines = Engines::default(); b.iter(|| { - for _ in 0..NUM_DID_CHANGE_ITERATIONS { - let _ = black_box(session::compile(&uri, &session.engines.read(), None).unwrap()); + for version in 0..NUM_DID_CHANGE_ITERATIONS { + let _ = black_box(session::compile(&uri, Some(version as i32), &engines, None).unwrap()); } }) }); From 658095b88ee7ae4708ada5ea4f6695f6410461e4 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 21 Dec 2023 15:45:41 +1100 Subject: [PATCH 17/40] fix tests --- sway-core/src/lib.rs | 5 +-- sway-core/src/query_engine/mod.rs | 4 +- sway-lsp/benches/lsp_benchmarks/mod.rs | 2 +- sway-lsp/src/core/session.rs | 15 +++---- sway-lsp/src/handlers/notification.rs | 36 +++++++++------- sway-lsp/src/server_state.rs | 59 ++++++++++++++------------ sway-lsp/tests/integration/lsp.rs | 2 + sway-lsp/tests/lib.rs | 10 ++--- 8 files changed, 68 insertions(+), 65 deletions(-) diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index 0c9acf57f93..7272c9cdef8 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -624,7 +624,7 @@ pub fn compile_to_ast( let new_handler = Handler::from_parts(warnings, errors); handler.append(new_handler); - eprintln!("re-using cached prgram data, returning from compilation"); + //eprintln!("re-using cached prgram data, returning from compilation"); return Ok(entry.programs); }; } @@ -963,7 +963,6 @@ fn module_return_path_analysis( } fn check_should_abort(handler: &Handler, retrigger_compilation: Option>, line: u32) -> Result<(), ErrorEmitted> { - //eprintln!("check_should_abort at line {}.", line); if let Some(ref retrigger_compilation) = retrigger_compilation { if retrigger_compilation.load(Ordering::SeqCst) { eprintln!("Aborting compilation due to retrigger as line {}.", line); @@ -972,7 +971,7 @@ fn check_should_abort(handler: &Handler, retrigger_compilation: Option Self { Self { - parse_module_cache: RwLock::new(self.parse_module_cache.read().unwrap().clone()), - programs_cache: RwLock::new(self.programs_cache.read().unwrap().clone()), + parse_module_cache: RwLock::new(self.parse_module_cache.read().expect("Lock is poisoned").clone()), + programs_cache: RwLock::new(self.programs_cache.read().expect("Lock is poisoned").clone()), } } } diff --git a/sway-lsp/benches/lsp_benchmarks/mod.rs b/sway-lsp/benches/lsp_benchmarks/mod.rs index cf4bf9827f0..df1f9a281a3 100644 --- a/sway-lsp/benches/lsp_benchmarks/mod.rs +++ b/sway-lsp/benches/lsp_benchmarks/mod.rs @@ -12,7 +12,7 @@ pub async fn compile_test_project() -> (Url, Arc) { let uri = Url::from_file_path(benchmark_dir().join("src/main.sw")).unwrap(); session.handle_open_file(&uri).await; // Compile the project and write the parse result to the session - let parse_result = session::parse_project(&uri, &session.engines.read(), None).unwrap(); + let parse_result = session::parse_project(&uri, None, &session.engines.read(), None).unwrap(); session.write_parse_result(parse_result); (uri, Arc::new(session)) } diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index 2ed2cc37b5b..6e88e8b7bf8 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -149,7 +149,7 @@ impl Session { self.runnables.clear(); self.metrics.clear(); - eprintln!("THREAD | 
success, about to token map"); + //eprintln!("THREAD | success, about to token map"); res.token_map.deref().iter().for_each(|item| { let (i, t) = item.pair(); self.token_map.insert(i.clone(), t.clone()); @@ -161,7 +161,7 @@ impl Session { }); let (errors, warnings) = res.diagnostics; - eprintln!("THREAD | success, about to write diagnostics"); + //eprintln!("THREAD | success, about to write diagnostics"); *self.diagnostics.write() = capabilities::diagnostic::get_diagnostics( &warnings, &errors, @@ -173,7 +173,7 @@ impl Session { self.engines.read().de(), self.engines.read().se(), ); - eprintln!("THREAD | success, about to write programs"); + //eprintln!("THREAD | success, about to write programs"); self.compiled_program.write().lexed = Some(res.lexed); self.compiled_program.write().parsed = Some(res.parsed); self.compiled_program.write().typed = Some(res.typed); @@ -439,10 +439,7 @@ pub fn compile( engines: &Engines, retrigger_compilation: Option>, ) -> Result, Handler)>, LanguageServerError> { - let now = std::time::Instant::now(); - eprintln!("loading build plan for version {:?}", version); let build_plan = build_plan(uri)?; - eprintln!("build plan loaded, about to compile version {:?} | took {:?}", version, now.elapsed()); let tests_enabled = true; pkg::check( &build_plan, @@ -542,10 +539,10 @@ pub fn traverse( pub fn parse_project(uri: &Url, version: Option, engines: &Engines, retrigger_compilation: Option>) -> Result { let results = compile(uri, version, engines, retrigger_compilation)?; if results.last().is_none() { - eprintln!("compilation failed, returning"); + //eprintln!("compilation failed, returning"); return Err(LanguageServerError::ProgramsIsNone); } else { - eprintln!("compilation successful, starting traversal"); + //eprintln!("compilation successful, starting traversal"); } let TraversalResult { diagnostics, @@ -553,7 +550,7 @@ pub fn parse_project(uri: &Url, version: Option, engines: &Engines, retrigg token_map, metrics, } = traverse(results, engines)?; - eprintln!("traversal successful"); + //eprintln!("traversal successful"); let (lexed, parsed, typed) = programs.ok_or(LanguageServerError::ProgramsIsNone)?; Ok(ParseResult { diagnostics, diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index 5cc29211f84..fa292bf963e 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -13,8 +13,6 @@ pub async fn handle_did_open_text_document( params: DidOpenTextDocumentParams, ) -> Result<(), LanguageServerError> { eprintln!("did_open_text_document"); - - //eprintln!("did_open_text_document"); let (uri, session) = state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) @@ -24,7 +22,13 @@ pub async fn handle_did_open_text_document( // Otherwise, don't recompile the project when a new file in the project is opened // as the workspace is already compiled. 
if session.token_map().is_empty() { - send_new_compilation_request(&state, session.clone(), &uri, None); + // send_new_compilation_request(&state, session.clone(), &uri, None); + let _ = state.mpsc_tx.send(ThreadMessage::CompilationData(Shared { + session: Some(session.clone()), + uri: Some(uri.clone()), + version: None, + })); + state.is_compiling.store(true, Ordering::SeqCst); eprintln!("did open - waiting for parsing to finish"); state.wait_for_parsing().await; @@ -39,21 +43,21 @@ fn send_new_compilation_request( uri: &Url, version: Option, ) { - eprintln!("new compilation request: version {:?} - setting is_compiling to true", version); + //eprintln!("new compilation request: version {:?} - setting is_compiling to true", version); if state.is_compiling.load(Ordering::SeqCst) { - eprintln!("retrigger compilation!"); + // eprintln!("retrigger compilation!"); state.retrigger_compilation.store(true, Ordering::SeqCst); } - - // If channel is full, remove the old value so the compilation thread only - // gets the latest value. + + // If channel is full, remove the old value so the compilation + // thread only gets the latest value. if state.mpsc_tx.is_full() { - if let Ok(ThreadMessage::CompilationData(res)) = state.mpsc_rx.try_recv() { - eprintln!("channel is full! discarding version: {:?}", res.version); + if let Ok(ThreadMessage::CompilationData(_)) = state.mpsc_rx.try_recv() { + //eprintln!("channel is full! discarding version: {:?}", res.version); } } - eprintln!("sending new compilation request: version {:?}", version); + //eprintln!("sending new compilation request: version {:?}", version); let _ = state.mpsc_tx.send(ThreadMessage::CompilationData(Shared { session: Some(session.clone()), uri: Some(uri.clone()), @@ -65,17 +69,17 @@ pub async fn handle_did_change_text_document( state: &ServerState, params: DidChangeTextDocumentParams, ) -> Result<(), LanguageServerError> { - eprintln!("did change text document: version: {:?}", params.text_document.version); + //eprintln!("did change text document: version: {:?}", params.text_document.version); document::mark_file_as_dirty(¶ms.text_document.uri).await?; let (uri, session) = state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) .await?; - eprintln!("writing changes to file for version: {:?}", params.text_document.version); + //eprintln!("writing changes to file for version: {:?}", params.text_document.version); session .write_changes_to_file(&uri, params.content_changes) .await?; - eprintln!("changes for version {:?} have been written to disk", params.text_document.version); + //eprintln!("changes for version {:?} have been written to disk", params.text_document.version); send_new_compilation_request(&state, session.clone(), &uri, Some(params.text_document.version)); Ok(()) } @@ -84,14 +88,14 @@ pub(crate) async fn handle_did_save_text_document( state: &ServerState, params: DidSaveTextDocumentParams, ) -> Result<(), LanguageServerError> { - eprintln!("did save text document"); + //eprintln!("did save text document"); document::remove_dirty_flag(¶ms.text_document.uri).await?; let (uri, session) = state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) .await?; session.sync.resync()?; - eprintln!("resynced"); + //eprintln!("resynced"); send_new_compilation_request(&state, session.clone(), &uri, None); state.wait_for_parsing().await; state.publish_diagnostics(uri, params.text_document.uri, session).await; diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index d33b67ff752..f802a3d4284 
100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -12,7 +12,10 @@ use dashmap::DashMap; use forc_pkg::PackageManifestFile; use lsp_types::{Diagnostic, Url}; use parking_lot::RwLock; -use std::{path::PathBuf, sync::{Arc, atomic::{AtomicBool, Ordering}}, thread::JoinHandle}; +use std::{ + path::PathBuf, + sync::{Arc, atomic::{AtomicBool, Ordering}}, +}; use tower_lsp::{jsonrpc, Client}; /// `ServerState` is the primary mutable state of the language server @@ -22,7 +25,7 @@ pub struct ServerState { pub(crate) keyword_docs: Arc, pub(crate) sessions: Arc, pub(crate) retrigger_compilation: Arc, - pub(crate) is_compiling: Arc, + pub is_compiling: Arc, pub(crate) mpsc_tx: Sender, pub(crate) mpsc_rx: Arc>, pub(crate) finished_compilation: Arc, @@ -34,6 +37,7 @@ pub struct ServerState { pub enum LastCompilationState { Success, Failed, + Uninitialized, } impl Default for ServerState { @@ -50,7 +54,7 @@ impl Default for ServerState { mpsc_tx, mpsc_rx: Arc::new(mpsc_rx), finished_compilation: Arc::new(tokio::sync::Notify::new()), - last_compilation_state: Arc::new(RwLock::new(LastCompilationState::Success)), + last_compilation_state: Arc::new(RwLock::new(LastCompilationState::Uninitialized)), }; state.spawn_compilation_thread(); @@ -77,28 +81,27 @@ fn update_compilation_state( finished_compilation: Arc, rx: Arc>, ) { - eprintln!("THREAD | update_compilation_state"); + //eprintln!("THREAD | update_compilation_state"); is_compiling.store(false, Ordering::SeqCst); - eprintln!("THREAD | is_compiling = {:?}", is_compiling.load(Ordering::SeqCst)); + //eprintln!("THREAD | is_compiling = {:?}", is_compiling.load(Ordering::SeqCst)); retrigger_compilation.store(false, Ordering::SeqCst); - eprintln!("THREAD | retrigger_compilation = {:?}", retrigger_compilation.load(Ordering::SeqCst)); + //eprintln!("THREAD | retrigger_compilation = {:?}", retrigger_compilation.load(Ordering::SeqCst)); // Make sure there isn't any pending compilation work if rx.is_empty() { //eprintln!("THREAD | no pending compilation work, safe to set is_compiling to false"); - eprintln!("THREAD | finished compilation, notifying waiters"); + //eprintln!("THREAD | finished compilation, notifying waiters"); finished_compilation.notify_waiters(); } else { - eprintln!("THREAD | there is pending compilation work"); + //eprintln!("THREAD | there is pending compilation work"); } } impl ServerState { pub fn new(client: Client) -> ServerState { - eprintln!("ServerState::new"); ServerState { client: Some(client), ..Default::default() @@ -115,7 +118,7 @@ impl ServerState { while let Ok(msg) = rx.recv() { match msg { ThreadMessage::CompilationData(shared) => { - eprintln!("THREAD | received new compilation request"); + //eprintln!("THREAD | received new compilation request"); let uri = shared.uri.as_ref().unwrap().clone(); let version = shared.version; @@ -133,28 +136,28 @@ impl ServerState { // } // } is_compiling.store(true, Ordering::SeqCst); - eprintln!("THREAD | starting parsing project: version: {:?}", version); + //eprintln!("THREAD | starting parsing project: version: {:?}", version); match session::parse_project(&uri, version, &engines_clone, Some(retrigger_compilation.clone())) { Ok(parse_result) => { - eprintln!("THREAD | engines_write: {:?}", version); + //eprintln!("THREAD | engines_write: {:?}", version); *session.engines.write() = engines_clone; - eprintln!("THREAD | success, about to write parse results: {:?}", version); + //eprintln!("THREAD | success, about to write parse results: {:?}", version); 
session.write_parse_result(parse_result); - eprintln!("THREAD | finished writing parse results: {:?}", version); + //eprintln!("THREAD | finished writing parse results: {:?}", version); update_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone(), rx.clone()); *last_compilation_state.write() = LastCompilationState::Success; }, Err(err) => { - eprintln!("compilation has returned cancelled {:?}", err); + //eprintln!("compilation has returned cancelled {:?}", err); update_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone(), rx.clone()); *last_compilation_state.write() = LastCompilationState::Failed; continue; }, } - eprintln!("THREAD | finished parsing project: version: {:?}", version); + //eprintln!("THREAD | finished parsing project: version: {:?}", version); } ThreadMessage::Terminate => { - eprintln!("THREAD | received terminate message"); + //eprintln!("THREAD | received terminate message"); return; } } @@ -169,38 +172,40 @@ impl ServerState { /// this process until `is_compiling` becomes false. pub async fn wait_for_parsing(&self) { loop { - eprintln!("are we still compiling? | is_compiling = {:?}", self.is_compiling.load(Ordering::SeqCst)); + //eprintln!("are we still compiling? | is_compiling = {:?}", self.is_compiling.load(Ordering::SeqCst)); if !self.is_compiling.load(Ordering::SeqCst) { - eprintln!("compilation is finished, lets check if there are pending compilation requests"); + //eprintln!("compilation is finished, lets check if there are pending compilation requests"); if self.mpsc_rx.is_empty() { - eprintln!("no pending compilation work, safe to break"); + //eprintln!("no pending compilation work, safe to break"); eprintln!("And the last compilation state was: {:?}", &self.last_compilation_state.read()); + break; } else { - eprintln!("there is pending compilation work, lets wait for it to finish"); + //eprintln!("there is pending compilation work, lets wait for it to finish"); } } else { - eprintln!("we are still compiling, lets wait to be notified"); + //eprintln!("we are still compiling, lets wait to be notified"); } self.finished_compilation.notified().await; - eprintln!("we were notified, lets check if we are still compiling"); + //eprintln!("we were notified, lets check if we are still compiling"); } } pub async fn shutdown_server(&self) -> jsonrpc::Result<()> { tracing::info!("Shutting Down the Sway Language Server"); - // set the retrigger_compilation flag to true so that the compilation exit early + // Drain pending compilation requests while let Ok(_) = self.mpsc_rx.try_recv() { - eprintln!("draining pending compilation requests"); + //eprintln!("draining pending compilation requests"); } + // set the retrigger_compilation flag to true so that the compilation exit early self.retrigger_compilation.store(true, Ordering::SeqCst); self.wait_for_parsing().await; - eprintln!("sending terminate message"); + //eprintln!("sending terminate message"); self.mpsc_tx.send(ThreadMessage::Terminate).expect("failed to send terminate message"); - eprintln!("shutting down the sessions"); + //eprintln!("shutting down the sessions"); let _ = self.sessions.iter().map(|item| { let session = item.value(); session.shutdown(); diff --git a/sway-lsp/tests/integration/lsp.rs b/sway-lsp/tests/integration/lsp.rs index d56654550b5..78ddaeb8767 100644 --- a/sway-lsp/tests/integration/lsp.rs +++ b/sway-lsp/tests/integration/lsp.rs @@ -119,6 +119,8 @@ pub(crate) async fn did_change_request( .params(params) 
.finish(); let response = call_request(service, did_change.clone()).await; + // make sure to set is_compiling to true so the wait_for_parsing method can properly synchnonize + service.inner().is_compiling.store(true, std::sync::atomic::Ordering::SeqCst); assert_eq!(response, Ok(None)); did_change } diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index bece8de531b..45a0cd4c08f 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -101,7 +101,7 @@ async fn did_cache_test() { .custom_method("sway/metrics", ServerState::metrics) .finish(); let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await; - let _ = lsp::did_change_request(&mut service, &uri, 1).await; + let _ = lsp::did_change_request(&mut service, &uri, 1).await; service.inner().wait_for_parsing().await; let metrics = lsp::metrics_request(&mut service, &uri).await; assert!(metrics.len() >= 2); @@ -120,8 +120,6 @@ async fn did_change_stress_test() { .custom_method("sway/metrics", ServerState::metrics) .finish(); let bench_dir = sway_workspace_dir().join("sway-lsp/tests/fixtures/benchmark"); - - let now = std::time::Instant::now(); let uri = init_and_open(&mut service, bench_dir.join("src/main.sw")).await; let times = 400; for version in 0..times { @@ -136,17 +134,15 @@ async fn did_change_stress_test() { } } - // if rand::random::() < 220 { - // let random_duration = rand::random::() as u64 % 80; + // if rand::random::() < 230 { + // let random_duration = rand::random::() as u64 % 10; // std::thread::sleep(std::time::Duration::from_millis(random_duration)); // } else { // let random_duration = rand::random::() % 3000; // std::thread::sleep(std::time::Duration::from_millis(random_duration)); // } } - eprintln!("SHUTTING DOWN!..."); shutdown_and_exit(&mut service).await; - eprintln!("did_change_stress_test took: {:?}", now.elapsed()); } #[tokio::test] From 57edd74773a48ff94fcf93d0129b5ce72dc7253c Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 21 Dec 2023 15:49:27 +1100 Subject: [PATCH 18/40] remove old code --- sway-lsp/src/core/session.rs | 4 ---- sway-lsp/src/server_state.rs | 46 ------------------------------------ 2 files changed, 50 deletions(-) diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index 6e88e8b7bf8..ec11d233235 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -75,9 +75,6 @@ pub struct Session { pub compiled_program: RwLock, pub engines: RwLock, pub sync: SyncWorkspace, - // Limit the number of threads that can wait to parse at the same time. One thread can be parsing - // and one thread can be waiting to start parsing. All others will return the cached diagnostics. - pub parse_permits: Arc, // Cached diagnostic results that require a lock to access. Readers will wait for writers to complete. 
pub diagnostics: Arc>, pub metrics: DashMap, @@ -99,7 +96,6 @@ impl Session { compiled_program: RwLock::new(Default::default()), engines: <_>::default(), sync: SyncWorkspace::new(), - parse_permits: Arc::new(Semaphore::new(2)), diagnostics: Arc::new(RwLock::new(DiagnosticMap::new())), } } diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index f802a3d4284..4a80cf7dc51 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -29,7 +29,6 @@ pub struct ServerState { pub(crate) mpsc_tx: Sender, pub(crate) mpsc_rx: Arc>, pub(crate) finished_compilation: Arc, - pub(crate) last_compilation_state: Arc>, } @@ -263,51 +262,6 @@ impl ServerState { } } - - -// /// Runs parse_project in a blocking thread, because parsing is not async. -// async fn run_blocking_parse_project( -// uri: Url, -// version: Option, -// session: Arc, -// retrigger_compilation: Option>, -// ) -> Result<(), LanguageServerError> { -// // Acquire a permit to parse the project. If there are none available, return false. This way, -// // we avoid publishing the same diagnostics multiple times. -// if session.parse_permits.try_acquire().is_err() { -// return Err(LanguageServerError::UnableToAcquirePermit); -// } -// tokio::task::spawn_blocking(move || { -// // Lock the diagnostics result to prevent multiple threads from parsing the project at the same time. -// let _ = session.diagnostics.write(); - -// if let Some(version) = version { -// // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. -// if version % 10 == 0 { -// if let Err(err) = session.garbage_collect() { -// tracing::error!("Unable to perform garbage collection: {}", err.to_string()); -// } -// } -// } -// let now = std::time::Instant::now(); -// let engines_clone = session.engines.read().clone(); -// eprintln!("parse_project: engines_clone: {:?}", now.elapsed()); - -// let now = std::time::Instant::now(); -// let parse_result = session::parse_project(&uri, &engines_clone, retrigger_compilation)?; -// eprintln!("compilation_took: {:?}", now.elapsed()); - -// let now = std::time::Instant::now(); -// *session.engines.write() = engines_clone; -// eprintln!("parse_project: engines_write: {:?}", now.elapsed()); - -// session.write_parse_result(parse_result); -// Ok(()) -// }) -// .await -// .unwrap_or_else(|_| Err(LanguageServerError::FailedToParse)) -// } - /// `Sessions` is a collection of [Session]s, each of which represents a project /// that has been opened in the users workspace. 
pub(crate) struct Sessions(DashMap>); From a090eec6d513c2e10cbcd1a9aa4aca47264b6961 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 21 Dec 2023 15:54:09 +1100 Subject: [PATCH 19/40] re-enable gc --- sway-lsp/src/core/session.rs | 2 +- sway-lsp/src/server_state.rs | 21 +++++++++++---------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index ec11d233235..43c0a2b9a4d 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -39,7 +39,7 @@ use sway_core::{ use sway_error::{error::CompileError, handler::Handler, warning::CompileWarning}; use sway_types::{SourceEngine, SourceId, Spanned}; use sway_utils::{helpers::get_sway_files, PerformanceData}; -use tokio::{fs::File, io::AsyncWriteExt, sync::Semaphore}; +use tokio::{fs::File, io::AsyncWriteExt}; pub type Documents = DashMap; pub type ProjectDirectory = PathBuf; diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 4a80cf7dc51..491b4b3da0d 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -124,16 +124,17 @@ impl ServerState { let session = shared.session.as_ref().unwrap().clone(); let mut engines_clone = session.engines.read().clone(); - // if let Some(version) = version { - // // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. - // if version % 10 == 0 { - // // Call this on the engines clone so we don't clear types that are still in use - // // and might be needed in the case cancel compilation was triggered. - // if let Err(err) = session.garbage_collect(&mut engines_clone) { - // tracing::error!("Unable to perform garbage collection: {}", err.to_string()); - // } - // } - // } + if let Some(version) = version { + // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. + if version % 10 == 0 { + // Call this on the engines clone so we don't clear types that are still in use + // and might be needed in the case cancel compilation was triggered. 
+ if let Err(err) = session.garbage_collect(&mut engines_clone) { + tracing::error!("Unable to perform garbage collection: {}", err.to_string()); + } + } + } + is_compiling.store(true, Ordering::SeqCst); //eprintln!("THREAD | starting parsing project: version: {:?}", version); match session::parse_project(&uri, version, &engines_clone, Some(retrigger_compilation.clone())) { From 47cdc5c44525b23d6f869d08e78894e9dfae289c Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 22 Dec 2023 11:29:07 +1100 Subject: [PATCH 20/40] re-enable on enter --- sway-lsp/src/handlers/request.rs | 5 ----- sway-lsp/src/lib.rs | 2 +- sway-lsp/src/server_state.rs | 22 +++++++++++----------- 3 files changed, 12 insertions(+), 17 deletions(-) diff --git a/sway-lsp/src/handlers/request.rs b/sway-lsp/src/handlers/request.rs index 9b0e1b7f1a4..65aec94d30a 100644 --- a/sway-lsp/src/handlers/request.rs +++ b/sway-lsp/src/handlers/request.rs @@ -52,7 +52,6 @@ pub async fn handle_document_symbol( state: &ServerState, params: lsp_types::DocumentSymbolParams, ) -> Result> { - eprintln!("document_symbol"); let _ = state.wait_for_parsing().await; match state .sessions @@ -75,7 +74,6 @@ pub async fn handle_goto_definition( state: &ServerState, params: lsp_types::GotoDefinitionParams, ) -> Result> { - eprintln!("goto_definition"); match state .sessions .uri_and_session_from_workspace(¶ms.text_document_position_params.text_document.uri) @@ -281,7 +279,6 @@ pub async fn handle_semantic_tokens_range( state: &ServerState, params: SemanticTokensRangeParams, ) -> Result> { - eprintln!("semantic_tokens_range"); let _ = state.wait_for_parsing().await; match state .sessions @@ -304,7 +301,6 @@ pub async fn handle_semantic_tokens_full( state: &ServerState, params: SemanticTokensParams, ) -> Result> { - eprintln!("semantic_tokens_full"); let _ = state.wait_for_parsing().await; match state .sessions @@ -327,7 +323,6 @@ pub(crate) async fn handle_inlay_hints( state: &ServerState, params: InlayHintParams, ) -> Result>> { - eprintln!("inlay_hints"); let _ = state.wait_for_parsing().await; match state .sessions diff --git a/sway-lsp/src/lib.rs b/sway-lsp/src/lib.rs index a11b4c0a364..85a29567a28 100644 --- a/sway-lsp/src/lib.rs +++ b/sway-lsp/src/lib.rs @@ -27,7 +27,7 @@ pub async fn start() { let (service, socket) = LspService::build(ServerState::new) .custom_method("sway/show_ast", ServerState::show_ast) .custom_method("sway/visualize", ServerState::visualize) - // .custom_method("sway/on_enter", ServerState::on_enter) + .custom_method("sway/on_enter", ServerState::on_enter) .custom_method("sway/metrics", ServerState::metrics) .finish(); Server::new(tokio::io::stdin(), tokio::io::stdout(), socket) diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 491b4b3da0d..32bfb6620dd 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -124,17 +124,17 @@ impl ServerState { let session = shared.session.as_ref().unwrap().clone(); let mut engines_clone = session.engines.read().clone(); - if let Some(version) = version { - // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. - if version % 10 == 0 { - // Call this on the engines clone so we don't clear types that are still in use - // and might be needed in the case cancel compilation was triggered. 
- if let Err(err) = session.garbage_collect(&mut engines_clone) { - tracing::error!("Unable to perform garbage collection: {}", err.to_string()); - } - } - } - + // if let Some(version) = version { + // // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. + // if version % 10 == 0 { + // // Call this on the engines clone so we don't clear types that are still in use + // // and might be needed in the case cancel compilation was triggered. + // if let Err(err) = session.garbage_collect(&mut engines_clone) { + // tracing::error!("Unable to perform garbage collection: {}", err.to_string()); + // } + // } + // } + is_compiling.store(true, Ordering::SeqCst); //eprintln!("THREAD | starting parsing project: version: {:?}", version); match session::parse_project(&uri, version, &engines_clone, Some(retrigger_compilation.clone())) { From 6e797c0358a8a2fe6f559dea8963cf4ead5a638a Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 4 Jan 2024 09:37:04 +1100 Subject: [PATCH 21/40] using local version of std for benchmark example --- sway-lsp/tests/fixtures/benchmark/Forc.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sway-lsp/tests/fixtures/benchmark/Forc.toml b/sway-lsp/tests/fixtures/benchmark/Forc.toml index 9f8988d6305..aae9e285313 100644 --- a/sway-lsp/tests/fixtures/benchmark/Forc.toml +++ b/sway-lsp/tests/fixtures/benchmark/Forc.toml @@ -6,4 +6,4 @@ name = "sway_project" implicit-std = false [dependencies] -std = { git = "https://github.com/FuelLabs/sway", tag = "v0.48.1" } +std = { path = "../../../../sway-lib-std" } \ No newline at end of file From 1b71afcdbbe3babbb0eb2d9a8f1239de045ce095 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 4 Jan 2024 12:32:15 +1100 Subject: [PATCH 22/40] rebase master From da82c9111a937e7f4ddc81d266d17a271b1a8d2c Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 4 Jan 2024 13:25:03 +1100 Subject: [PATCH 23/40] renable gc --- sway-lsp/src/server_state.rs | 21 +++++++++++---------- sway-lsp/tests/lib.rs | 16 ++++++++-------- 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 32bfb6620dd..cb729a11722 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -124,16 +124,17 @@ impl ServerState { let session = shared.session.as_ref().unwrap().clone(); let mut engines_clone = session.engines.read().clone(); - // if let Some(version) = version { - // // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. - // if version % 10 == 0 { - // // Call this on the engines clone so we don't clear types that are still in use - // // and might be needed in the case cancel compilation was triggered. - // if let Err(err) = session.garbage_collect(&mut engines_clone) { - // tracing::error!("Unable to perform garbage collection: {}", err.to_string()); - // } - // } - // } + if let Some(version) = version { + // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. + if version % 10 == 0 { + eprintln!("Garbage collecting"); + // Call this on the engines clone so we don't clear types that are still in use + // and might be needed in the case cancel compilation was triggered. 
+ if let Err(err) = session.garbage_collect(&mut engines_clone) { + tracing::error!("Unable to perform garbage collection: {}", err.to_string()); + } + } + } is_compiling.store(true, Ordering::SeqCst); //eprintln!("THREAD | starting parsing project: version: {:?}", version); diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index 45a0cd4c08f..7c38088a3e1 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -121,7 +121,7 @@ async fn did_change_stress_test() { .finish(); let bench_dir = sway_workspace_dir().join("sway-lsp/tests/fixtures/benchmark"); let uri = init_and_open(&mut service, bench_dir.join("src/main.sw")).await; - let times = 400; + let times = 4000; for version in 0..times { let _ = lsp::did_change_request(&mut service, &uri, version + 1).await; if version == 0 { @@ -134,13 +134,13 @@ async fn did_change_stress_test() { } } - // if rand::random::() < 230 { - // let random_duration = rand::random::() as u64 % 10; - // std::thread::sleep(std::time::Duration::from_millis(random_duration)); - // } else { - // let random_duration = rand::random::() % 3000; - // std::thread::sleep(std::time::Duration::from_millis(random_duration)); - // } + if rand::random::() < 230 { + let random_duration = rand::random::() as u64 % 10; + std::thread::sleep(std::time::Duration::from_millis(random_duration)); + } else { + let random_duration = rand::random::() % 3000; + std::thread::sleep(std::time::Duration::from_millis(random_duration)); + } } shutdown_and_exit(&mut service).await; } From 29591c5d9c1008d54d6f0b97ae57812accf35a24 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Thu, 4 Jan 2024 14:41:02 +1100 Subject: [PATCH 24/40] rebase esdrubal/concurrent_slab_map --- sway-core/src/type_system/engine.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/sway-core/src/type_system/engine.rs b/sway-core/src/type_system/engine.rs index c6049d19b27..f09630fcb86 100644 --- a/sway-core/src/type_system/engine.rs +++ b/sway-core/src/type_system/engine.rs @@ -26,7 +26,6 @@ impl Clone for TypeEngine { fn clone(&self) -> Self { TypeEngine { slab: self.slab.clone(), - slab_source_ids: self.slab_source_ids.clone(), id_map: RwLock::new(self.id_map.read().expect("Lock is poisoned").clone()), } } From 4863bbf824fd643ac8bad82adb5fcd77d3c2f2ef Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 5 Jan 2024 09:40:54 +1100 Subject: [PATCH 25/40] rebase and fmt --- forc-pkg/src/pkg.rs | 2 +- forc/src/ops/forc_check.rs | 9 ++- sway-core/src/ir_generation/const_eval.rs | 2 +- sway-core/src/lib.rs | 16 +++-- sway-core/src/query_engine/mod.rs | 19 +++++- sway-lsp/benches/lsp_benchmarks/compile.rs | 4 +- sway-lsp/src/capabilities/hover/mod.rs | 7 +- sway-lsp/src/capabilities/semantic_tokens.rs | 23 ++++--- sway-lsp/src/core/session.rs | 32 +++++---- sway-lsp/src/handlers/notification.rs | 31 ++++++--- sway-lsp/src/handlers/request.rs | 43 +++++++------ sway-lsp/src/lib.rs | 5 +- sway-lsp/src/server.rs | 5 +- sway-lsp/src/server_state.rs | 68 +++++++++++++------- sway-lsp/tests/integration/lsp.rs | 5 +- sway-lsp/tests/lib.rs | 2 +- sway-types/src/source_engine.rs | 28 ++++++-- 17 files changed, 207 insertions(+), 94 deletions(-) diff --git a/forc-pkg/src/pkg.rs b/forc-pkg/src/pkg.rs index 7f3a3899387..e3965ab6661 100644 --- a/forc-pkg/src/pkg.rs +++ b/forc-pkg/src/pkg.rs @@ -23,7 +23,7 @@ use std::{ io::Write, path::{Path, PathBuf}, str::FromStr, - sync::{Arc, atomic::AtomicBool}, + sync::{atomic::AtomicBool, Arc}, }; pub use sway_core::Programs; use sway_core::{ diff --git a/forc/src/ops/forc_check.rs 
b/forc/src/ops/forc_check.rs index d8805a3c07a..fd25682f177 100644 --- a/forc/src/ops/forc_check.rs +++ b/forc/src/ops/forc_check.rs @@ -34,7 +34,14 @@ pub fn check(command: CheckCommand, engines: &Engines) -> Result<(Option>, line: u32) -> Result<(), ErrorEmitted> { +fn check_should_abort( + handler: &Handler, + retrigger_compilation: Option>, + line: u32, +) -> Result<(), ErrorEmitted> { if let Some(ref retrigger_compilation) = retrigger_compilation { if retrigger_compilation.load(Ordering::SeqCst) { eprintln!("Aborting compilation due to retrigger as line {}.", line); @@ -976,7 +985,6 @@ fn check_should_abort(handler: &Handler, retrigger_compilation: Option Self { Self { - parse_module_cache: RwLock::new(self.parse_module_cache.read().expect("Lock is poisoned").clone()), - programs_cache: RwLock::new(self.programs_cache.read().expect("Lock is poisoned").clone()), + parse_module_cache: RwLock::new( + self.parse_module_cache + .read() + .expect("Lock is poisoned") + .clone(), + ), + programs_cache: RwLock::new( + self.programs_cache + .read() + .expect("Lock is poisoned") + .clone(), + ), } } } @@ -76,7 +86,10 @@ impl QueryEngine { } pub fn get_programs_cache_entry(&self, path: &Arc) -> Option { - let cache = self.programs_cache.read().expect("Failed to read programs cache"); + let cache = self + .programs_cache + .read() + .expect("Failed to read programs cache"); cache.get(path).cloned() } diff --git a/sway-lsp/benches/lsp_benchmarks/compile.rs b/sway-lsp/benches/lsp_benchmarks/compile.rs index 0c3c7d9084e..e868d9d2d85 100644 --- a/sway-lsp/benches/lsp_benchmarks/compile.rs +++ b/sway-lsp/benches/lsp_benchmarks/compile.rs @@ -27,7 +27,9 @@ fn benchmarks(c: &mut Criterion) { let engines = Engines::default(); b.iter(|| { for version in 0..NUM_DID_CHANGE_ITERATIONS { - let _ = black_box(session::compile(&uri, Some(version as i32), &engines, None).unwrap()); + let _ = black_box( + session::compile(&uri, Some(version as i32), &engines, None).unwrap(), + ); } }) }); diff --git a/sway-lsp/src/capabilities/hover/mod.rs b/sway-lsp/src/capabilities/hover/mod.rs index f875410745e..b3fea617ab3 100644 --- a/sway-lsp/src/capabilities/hover/mod.rs +++ b/sway-lsp/src/capabilities/hover/mod.rs @@ -64,7 +64,12 @@ pub fn hover_data( None => (ident, token), }; - let contents = hover_format(session.clone(), &session.engines.read(), &decl_token, &decl_ident.name); + let contents = hover_format( + session.clone(), + &session.engines.read(), + &decl_token, + &decl_ident.name, + ); Some(lsp_types::Hover { contents, range: Some(range), diff --git a/sway-lsp/src/capabilities/semantic_tokens.rs b/sway-lsp/src/capabilities/semantic_tokens.rs index 894c9c938bf..8d867eae3c9 100644 --- a/sway-lsp/src/capabilities/semantic_tokens.rs +++ b/sway-lsp/src/capabilities/semantic_tokens.rs @@ -4,7 +4,7 @@ use crate::core::{ }; use lsp_types::{ Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens, - SemanticTokensResult, Url, SemanticTokensRangeResult, + SemanticTokensRangeResult, SemanticTokensResult, Url, }; use std::sync::{ atomic::{AtomicU32, Ordering}, @@ -24,14 +24,21 @@ pub fn semantic_tokens_full(session: Arc, url: &Url) -> Option, url: &Url, range: &Range) -> Option { +pub fn semantic_tokens_range( + session: Arc, + url: &Url, + range: &Range, +) -> Option { eprintln!("semantic_tokens_range: range: {:#?}", range); - let mut tokens_sorted: Vec<_> = session.token_map().tokens_for_file(url) - .filter(|t| { - // make sure the tokenident range is within the range that was passed in - let 
token_range = t.0.range; - token_range.start >= range.start && token_range.end <= range.end - }).collect(); + let mut tokens_sorted: Vec<_> = session + .token_map() + .tokens_for_file(url) + .filter(|t| { + // make sure the tokenident range is within the range that was passed in + let token_range = t.0.range; + token_range.start >= range.start && token_range.end <= range.end + }) + .collect(); eprintln!("Number of tokens in range: {}", tokens_sorted.len()); tokens_sorted.sort_by(|(a_span, _), (b_span, _)| { let a = (a_span.range.start, a_span.range.end); diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index 43c0a2b9a4d..41af7156c16 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -25,7 +25,11 @@ use lsp_types::{ use parking_lot::RwLock; use pkg::{manifest::ManifestFile, BuildPlan}; use rayon::iter::{ParallelBridge, ParallelIterator}; -use std::{ops::Deref, path::PathBuf, sync::{Arc, atomic::AtomicBool}}; +use std::{ + ops::Deref, + path::PathBuf, + sync::{atomic::AtomicBool, Arc}, +}; use sway_core::{ decl_engine::DeclEngine, language::{ @@ -158,11 +162,8 @@ impl Session { let (errors, warnings) = res.diagnostics; //eprintln!("THREAD | success, about to write diagnostics"); - *self.diagnostics.write() = capabilities::diagnostic::get_diagnostics( - &warnings, - &errors, - self.engines.read().se(), - ); + *self.diagnostics.write() = + capabilities::diagnostic::get_diagnostics(&warnings, &errors, self.engines.read().se()); self.create_runnables( &res.typed, @@ -219,9 +220,12 @@ impl Session { character: position.character - trigger_char.len() as u32 - 1, }; let (ident_to_complete, _) = self.token_map.token_at_position(uri, shifted_position)?; - let fn_tokens = - self.token_map - .tokens_at_position(self.engines.read().se(), uri, shifted_position, Some(true)); + let fn_tokens = self.token_map.tokens_at_position( + self.engines.read().se(), + uri, + shifted_position, + Some(true), + ); let (_, fn_token) = fn_tokens.first()?; let compiled_program = &*self.compiled_program.read(); if let Some(TypedAstToken::TypedFunctionDeclaration(fn_decl)) = fn_token.typed.clone() { @@ -532,7 +536,12 @@ pub fn traverse( } /// Parses the project and returns true if the compiler diagnostics are new and should be published. -pub fn parse_project(uri: &Url, version: Option, engines: &Engines, retrigger_compilation: Option>) -> Result { +pub fn parse_project( + uri: &Url, + version: Option, + engines: &Engines, + retrigger_compilation: Option>, +) -> Result { let results = compile(uri, version, engines, retrigger_compilation)?; if results.last().is_none() { //eprintln!("compilation failed, returning"); @@ -622,7 +631,8 @@ mod tests { let dir = get_absolute_path("sway-lsp/tests/fixtures"); let uri = get_url(&dir); let engines = Engines::default(); - let result = parse_project(&uri, None, &engines, None).expect_err("expected ManifestFileNotFound"); + let result = + parse_project(&uri, None, &engines, None).expect_err("expected ManifestFileNotFound"); assert!(matches!( result, LanguageServerError::DocumentError( diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index fa292bf963e..24e10238759 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -1,12 +1,16 @@ //! This module is responsible for implementing handlers for Language Server //! Protocol. This module specifically handles notification messages sent by the Client. 
-use std::sync::{atomic::Ordering, Arc}; -use crate::{core::{document, session::Session}, error::LanguageServerError, server_state::{ServerState, Shared, ThreadMessage}}; +use crate::{ + core::{document, session::Session}, + error::LanguageServerError, + server_state::{ServerState, Shared, ThreadMessage}, +}; use lsp_types::{ DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, FileChangeType, Url, }; +use std::sync::{atomic::Ordering, Arc}; pub async fn handle_did_open_text_document( state: &ServerState, @@ -27,12 +31,14 @@ pub async fn handle_did_open_text_document( session: Some(session.clone()), uri: Some(uri.clone()), version: None, - })); + })); state.is_compiling.store(true, Ordering::SeqCst); eprintln!("did open - waiting for parsing to finish"); state.wait_for_parsing().await; - state.publish_diagnostics(uri, params.text_document.uri, session).await; + state + .publish_diagnostics(uri, params.text_document.uri, session) + .await; } Ok(()) } @@ -45,11 +51,11 @@ fn send_new_compilation_request( ) { //eprintln!("new compilation request: version {:?} - setting is_compiling to true", version); if state.is_compiling.load(Ordering::SeqCst) { - // eprintln!("retrigger compilation!"); + // eprintln!("retrigger compilation!"); state.retrigger_compilation.store(true, Ordering::SeqCst); } - - // If channel is full, remove the old value so the compilation + + // If channel is full, remove the old value so the compilation // thread only gets the latest value. if state.mpsc_tx.is_full() { if let Ok(ThreadMessage::CompilationData(_)) = state.mpsc_rx.try_recv() { @@ -80,7 +86,12 @@ pub async fn handle_did_change_text_document( .write_changes_to_file(&uri, params.content_changes) .await?; //eprintln!("changes for version {:?} have been written to disk", params.text_document.version); - send_new_compilation_request(&state, session.clone(), &uri, Some(params.text_document.version)); + send_new_compilation_request( + &state, + session.clone(), + &uri, + Some(params.text_document.version), + ); Ok(()) } @@ -98,7 +109,9 @@ pub(crate) async fn handle_did_save_text_document( //eprintln!("resynced"); send_new_compilation_request(&state, session.clone(), &uri, None); state.wait_for_parsing().await; - state.publish_diagnostics(uri, params.text_document.uri, session).await; + state + .publish_diagnostics(uri, params.text_document.uri, session) + .await; Ok(()) } diff --git a/sway-lsp/src/handlers/request.rs b/sway-lsp/src/handlers/request.rs index 65aec94d30a..29d24b180b0 100644 --- a/sway-lsp/src/handlers/request.rs +++ b/sway-lsp/src/handlers/request.rs @@ -8,7 +8,8 @@ use forc_tracing::{init_tracing_subscriber, TracingSubscriberOptions, TracingWri use lsp_types::{ CodeLens, CompletionResponse, DocumentFormattingParams, DocumentSymbolResponse, InitializeResult, InlayHint, InlayHintParams, PrepareRenameResponse, RenameParams, - SemanticTokensParams, SemanticTokensResult, TextDocumentIdentifier, Url, WorkspaceEdit, SemanticTokensRangeResult, SemanticTokensRangeParams, + SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult, + SemanticTokensResult, TextDocumentIdentifier, Url, WorkspaceEdit, }; use std::{ fs::File, @@ -58,11 +59,9 @@ pub async fn handle_document_symbol( .uri_and_session_from_workspace(¶ms.text_document.uri) .await { - Ok((uri, session)) => { - Ok(session - .symbol_information(&uri) - .map(DocumentSymbolResponse::Flat)) - } + Ok((uri, session)) => Ok(session + .symbol_information(&uri) + 
.map(DocumentSymbolResponse::Flat)), Err(err) => { tracing::error!("{}", err.to_string()); Ok(None) @@ -265,9 +264,7 @@ pub async fn handle_code_lens( .uri_and_session_from_workspace(¶ms.text_document.uri) .await { - Ok((url, session)) => { - Ok(Some(capabilities::code_lens::code_lens(&session, &url))) - } + Ok((url, session)) => Ok(Some(capabilities::code_lens::code_lens(&session, &url))), Err(err) => { tracing::error!("{}", err.to_string()); Ok(None) @@ -285,11 +282,11 @@ pub async fn handle_semantic_tokens_range( .uri_and_session_from_workspace(¶ms.text_document.uri) .await { - Ok((uri, session)) => { - Ok(capabilities::semantic_tokens::semantic_tokens_range( - session, &uri, ¶ms.range, - )) - } + Ok((uri, session)) => Ok(capabilities::semantic_tokens::semantic_tokens_range( + session, + &uri, + ¶ms.range, + )), Err(err) => { tracing::error!("{}", err.to_string()); Ok(None) @@ -307,11 +304,9 @@ pub async fn handle_semantic_tokens_full( .uri_and_session_from_workspace(¶ms.text_document.uri) .await { - Ok((uri, session)) => { - Ok(capabilities::semantic_tokens::semantic_tokens_full( - session, &uri, - )) - } + Ok((uri, session)) => Ok(capabilities::semantic_tokens::semantic_tokens_full( + session, &uri, + )), Err(err) => { tracing::error!("{}", err.to_string()); Ok(None) @@ -385,7 +380,11 @@ pub async fn handle_show_ast( // Returns true if the current path matches the path of a submodule let path_is_submodule = |ident: &Ident, path: &Option| -> bool { - ident.span().source_id().map(|p| session.engines.read().se().get_path(p)) == *path + ident + .span() + .source_id() + .map(|p| session.engines.read().se().get_path(p)) + == *path }; let ast_path = PathBuf::from(params.save_path.path()); @@ -507,7 +506,9 @@ pub(crate) async fn metrics( Ok((_, session)) => { let mut metrics = vec![]; for kv in session.metrics.iter() { - let path = session.engines.read() + let path = session + .engines + .read() .se() .get_path(kv.key()) .to_string_lossy() diff --git a/sway-lsp/src/lib.rs b/sway-lsp/src/lib.rs index 85a29567a28..f2d60ec5e23 100644 --- a/sway-lsp/src/lib.rs +++ b/sway-lsp/src/lib.rs @@ -16,9 +16,8 @@ pub mod utils; use lsp_types::{ CodeActionProviderCapability, CodeLensOptions, CompletionOptions, ExecuteCommandOptions, - HoverProviderCapability, OneOf, RenameOptions, SemanticTokensLegend, - SemanticTokensOptions, ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncKind, - WorkDoneProgressOptions, + HoverProviderCapability, OneOf, RenameOptions, SemanticTokensLegend, SemanticTokensOptions, + ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncKind, WorkDoneProgressOptions, }; use server_state::ServerState; use tower_lsp::{LspService, Server}; diff --git a/sway-lsp/src/server.rs b/sway-lsp/src/server.rs index dd26faad0b8..240ddf11e37 100644 --- a/sway-lsp/src/server.rs +++ b/sway-lsp/src/server.rs @@ -14,8 +14,9 @@ use lsp_types::{ DocumentFormattingParams, DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams, InitializeParams, InitializeResult, InitializedParams, InlayHint, InlayHintParams, - PrepareRenameResponse, RenameParams, SemanticTokensParams, SemanticTokensResult, - TextDocumentIdentifier, TextDocumentPositionParams, TextEdit, WorkspaceEdit, SemanticTokensRangeParams, SemanticTokensRangeResult, + PrepareRenameResponse, RenameParams, SemanticTokensParams, SemanticTokensRangeParams, + SemanticTokensRangeResult, SemanticTokensResult, TextDocumentIdentifier, + 
TextDocumentPositionParams, TextEdit, WorkspaceEdit, }; use sway_utils::PerformanceData; use tower_lsp::{jsonrpc::Result, LanguageServer}; diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index cb729a11722..ed08df24e8a 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -7,14 +7,17 @@ use crate::{ utils::debug, utils::keyword_docs::KeywordDocs, }; -use crossbeam_channel::{Sender, Receiver}; +use crossbeam_channel::{Receiver, Sender}; use dashmap::DashMap; use forc_pkg::PackageManifestFile; use lsp_types::{Diagnostic, Url}; use parking_lot::RwLock; use std::{ - path::PathBuf, - sync::{Arc, atomic::{AtomicBool, Ordering}}, + path::PathBuf, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, + }, }; use tower_lsp::{jsonrpc, Client}; @@ -75,7 +78,7 @@ pub struct Shared { } fn update_compilation_state( - is_compiling: Arc, + is_compiling: Arc, retrigger_compilation: Arc, finished_compilation: Arc, rx: Arc>, @@ -91,7 +94,7 @@ fn update_compilation_state( // Make sure there isn't any pending compilation work if rx.is_empty() { //eprintln!("THREAD | no pending compilation work, safe to set is_compiling to false"); - + //eprintln!("THREAD | finished compilation, notifying waiters"); finished_compilation.notify_waiters(); } else { @@ -123,7 +126,7 @@ impl ServerState { let version = shared.version; let session = shared.session.as_ref().unwrap().clone(); let mut engines_clone = session.engines.read().clone(); - + if let Some(version) = version { // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. if version % 10 == 0 { @@ -131,29 +134,47 @@ impl ServerState { // Call this on the engines clone so we don't clear types that are still in use // and might be needed in the case cancel compilation was triggered. 
if let Err(err) = session.garbage_collect(&mut engines_clone) { - tracing::error!("Unable to perform garbage collection: {}", err.to_string()); + tracing::error!( + "Unable to perform garbage collection: {}", + err.to_string() + ); } } } - is_compiling.store(true, Ordering::SeqCst); + is_compiling.store(true, Ordering::SeqCst); //eprintln!("THREAD | starting parsing project: version: {:?}", version); - match session::parse_project(&uri, version, &engines_clone, Some(retrigger_compilation.clone())) { + match session::parse_project( + &uri, + version, + &engines_clone, + Some(retrigger_compilation.clone()), + ) { Ok(parse_result) => { //eprintln!("THREAD | engines_write: {:?}", version); *session.engines.write() = engines_clone; //eprintln!("THREAD | success, about to write parse results: {:?}", version); session.write_parse_result(parse_result); //eprintln!("THREAD | finished writing parse results: {:?}", version); - update_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone(), rx.clone()); + update_compilation_state( + is_compiling.clone(), + retrigger_compilation.clone(), + finished_compilation.clone(), + rx.clone(), + ); *last_compilation_state.write() = LastCompilationState::Success; - }, + } Err(err) => { //eprintln!("compilation has returned cancelled {:?}", err); - update_compilation_state(is_compiling.clone(), retrigger_compilation.clone(), finished_compilation.clone(), rx.clone()); + update_compilation_state( + is_compiling.clone(), + retrigger_compilation.clone(), + finished_compilation.clone(), + rx.clone(), + ); *last_compilation_state.write() = LastCompilationState::Failed; continue; - }, + } } //eprintln!("THREAD | finished parsing project: version: {:?}", version); } @@ -167,7 +188,7 @@ impl ServerState { } /// Waits asynchronously for the `is_compiling` flag to become false. - /// + /// /// This function checks the state of `is_compiling`, and if it's true, /// it awaits on a notification. Once notified, it checks again, repeating /// this process until `is_compiling` becomes false. @@ -178,7 +199,10 @@ impl ServerState { //eprintln!("compilation is finished, lets check if there are pending compilation requests"); if self.mpsc_rx.is_empty() { //eprintln!("no pending compilation work, safe to break"); - eprintln!("And the last compilation state was: {:?}", &self.last_compilation_state.read()); + eprintln!( + "And the last compilation state was: {:?}", + &self.last_compilation_state.read() + ); break; } else { @@ -204,7 +228,9 @@ impl ServerState { self.wait_for_parsing().await; //eprintln!("sending terminate message"); - self.mpsc_tx.send(ThreadMessage::Terminate).expect("failed to send terminate message"); + self.mpsc_tx + .send(ThreadMessage::Terminate) + .expect("failed to send terminate message"); //eprintln!("shutting down the sessions"); let _ = self.sessions.iter().map(|item| { @@ -225,11 +251,7 @@ impl ServerState { // in order to clear former diagnostics. Newly pushed diagnostics always replace previously pushed diagnostics. 
if let Some(client) = self.client.as_ref() { client - .publish_diagnostics( - workspace_uri.clone(), - diagnostics, - None, - ) + .publish_diagnostics(workspace_uri.clone(), diagnostics, None) .await; } } @@ -250,7 +272,9 @@ impl ServerState { diagnostics_to_publish = debug::generate_warnings_for_typed_tokens(tokens) } Warnings::Default => { - if let Some(diagnostics) = session.diagnostics.read().get(&PathBuf::from(uri.path())) { + if let Some(diagnostics) = + session.diagnostics.read().get(&PathBuf::from(uri.path())) + { if config.diagnostic.show_warnings { diagnostics_to_publish.extend(diagnostics.warnings.clone()); } diff --git a/sway-lsp/tests/integration/lsp.rs b/sway-lsp/tests/integration/lsp.rs index 78ddaeb8767..f4d10b59927 100644 --- a/sway-lsp/tests/integration/lsp.rs +++ b/sway-lsp/tests/integration/lsp.rs @@ -120,7 +120,10 @@ pub(crate) async fn did_change_request( .finish(); let response = call_request(service, did_change.clone()).await; // make sure to set is_compiling to true so the wait_for_parsing method can properly synchnonize - service.inner().is_compiling.store(true, std::sync::atomic::Ordering::SeqCst); + service + .inner() + .is_compiling + .store(true, std::sync::atomic::Ordering::SeqCst); assert_eq!(response, Ok(None)); did_change } diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index 7c38088a3e1..69a73d02207 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -101,7 +101,7 @@ async fn did_cache_test() { .custom_method("sway/metrics", ServerState::metrics) .finish(); let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await; - let _ = lsp::did_change_request(&mut service, &uri, 1).await; + let _ = lsp::did_change_request(&mut service, &uri, 1).await; service.inner().wait_for_parsing().await; let metrics = lsp::metrics_request(&mut service, &uri).await; assert!(metrics.len() >= 2); diff --git a/sway-types/src/source_engine.rs b/sway-types/src/source_engine.rs index 40810f8152a..65c145f7414 100644 --- a/sway-types/src/source_engine.rs +++ b/sway-types/src/source_engine.rs @@ -28,11 +28,31 @@ impl Clone for SourceEngine { fn clone(&self) -> Self { SourceEngine { next_source_id: RwLock::new(*self.next_source_id.read().expect("Lock is poisoned")), - path_to_source_map: RwLock::new(self.path_to_source_map.read().expect("Lock is poisoned").clone()), - source_to_path_map: RwLock::new(self.source_to_path_map.read().expect("Lock is poisoned").clone()), + path_to_source_map: RwLock::new( + self.path_to_source_map + .read() + .expect("Lock is poisoned") + .clone(), + ), + source_to_path_map: RwLock::new( + self.source_to_path_map + .read() + .expect("Lock is poisoned") + .clone(), + ), next_module_id: RwLock::new(*self.next_module_id.read().expect("Lock is poisoned")), - path_to_module_map: RwLock::new(self.path_to_module_map.read().expect("Lock is poisoned").clone()), - module_to_sources_map: RwLock::new(self.module_to_sources_map.read().expect("Lock is poisoned").clone()), + path_to_module_map: RwLock::new( + self.path_to_module_map + .read() + .expect("Lock is poisoned") + .clone(), + ), + module_to_sources_map: RwLock::new( + self.module_to_sources_map + .read() + .expect("Lock is poisoned") + .clone(), + ), } } } From d7ea66d5cad1952ccc7a6f9329a39a6debe7e891 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 5 Jan 2024 10:35:12 +1100 Subject: [PATCH 26/40] better names and doc comments --- sway-core/src/decl_engine/engine.rs | 2 +- sway-core/src/query_engine/mod.rs | 14 +--- sway-core/src/type_system/engine.rs | 2 +- 
sway-lsp/src/capabilities/semantic_tokens.rs | 32 ++++----- sway-lsp/src/handlers/notification.rs | 30 +++++---- sway-lsp/src/server_state.rs | 70 ++++++++++++-------- sway-lsp/tests/fixtures/benchmark/Forc.toml | 2 +- sway-lsp/tests/utils/Cargo.toml | 2 +- sway-types/src/source_engine.rs | 32 ++------- 9 files changed, 87 insertions(+), 99 deletions(-) diff --git a/sway-core/src/decl_engine/engine.rs b/sway-core/src/decl_engine/engine.rs index 9ffd966b9dc..b2a819dd93e 100644 --- a/sway-core/src/decl_engine/engine.rs +++ b/sway-core/src/decl_engine/engine.rs @@ -48,7 +48,7 @@ impl Clone for DeclEngine { constant_slab: self.constant_slab.clone(), enum_slab: self.enum_slab.clone(), type_alias_slab: self.type_alias_slab.clone(), - parents: RwLock::new(self.parents.read().expect("Lock is poisoned").clone()), + parents: RwLock::new(self.parents.read().unwrap().clone()), } } } diff --git a/sway-core/src/query_engine/mod.rs b/sway-core/src/query_engine/mod.rs index 63f7cf059a8..d2397c17bda 100644 --- a/sway-core/src/query_engine/mod.rs +++ b/sway-core/src/query_engine/mod.rs @@ -54,18 +54,8 @@ pub struct QueryEngine { impl Clone for QueryEngine { fn clone(&self) -> Self { Self { - parse_module_cache: RwLock::new( - self.parse_module_cache - .read() - .expect("Lock is poisoned") - .clone(), - ), - programs_cache: RwLock::new( - self.programs_cache - .read() - .expect("Lock is poisoned") - .clone(), - ), + parse_module_cache: RwLock::new(self.parse_module_cache.read().unwrap().clone()), + programs_cache: RwLock::new(self.programs_cache.read().unwrap().clone()), } } } diff --git a/sway-core/src/type_system/engine.rs b/sway-core/src/type_system/engine.rs index f09630fcb86..2f6736e9787 100644 --- a/sway-core/src/type_system/engine.rs +++ b/sway-core/src/type_system/engine.rs @@ -26,7 +26,7 @@ impl Clone for TypeEngine { fn clone(&self) -> Self { TypeEngine { slab: self.slab.clone(), - id_map: RwLock::new(self.id_map.read().expect("Lock is poisoned").clone()), + id_map: RwLock::new(self.id_map.read().unwrap().clone()), } } } diff --git a/sway-lsp/src/capabilities/semantic_tokens.rs b/sway-lsp/src/capabilities/semantic_tokens.rs index 8d867eae3c9..2d348294ff5 100644 --- a/sway-lsp/src/capabilities/semantic_tokens.rs +++ b/sway-lsp/src/capabilities/semantic_tokens.rs @@ -12,40 +12,42 @@ use std::sync::{ }; // https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71 + +/// Get the semantic tokens for the entire file. pub fn semantic_tokens_full(session: Arc, url: &Url) -> Option { - // The tokens need sorting by their span so each token is sequential - // If this step isn't done, then the bit offsets used for the lsp_types::SemanticToken are incorrect. - let mut tokens_sorted: Vec<_> = session.token_map().tokens_for_file(url).collect(); - tokens_sorted.sort_by(|(a_span, _), (b_span, _)| { - let a = (a_span.range.start, a_span.range.end); - let b = (b_span.range.start, b_span.range.end); - a.cmp(&b) - }); - Some(semantic_tokens(&tokens_sorted).into()) + let mut tokens: Vec<_> = session.token_map().tokens_for_file(url).collect(); + sort_tokens(&mut tokens); + Some(semantic_tokens(&tokens).into()) } +/// Get the semantic tokens within a range. 
pub fn semantic_tokens_range( session: Arc, url: &Url, range: &Range, ) -> Option { - eprintln!("semantic_tokens_range: range: {:#?}", range); - let mut tokens_sorted: Vec<_> = session + let mut tokens: Vec<_> = session .token_map() .tokens_for_file(url) .filter(|t| { - // make sure the tokenident range is within the range that was passed in + // make sure the token_ident range is within the range that was passed in let token_range = t.0.range; token_range.start >= range.start && token_range.end <= range.end }) .collect(); - eprintln!("Number of tokens in range: {}", tokens_sorted.len()); - tokens_sorted.sort_by(|(a_span, _), (b_span, _)| { + sort_tokens(&mut tokens); + Some(semantic_tokens(&tokens).into()) +} + +/// Sort tokens by their span so each token is sequential. +/// +/// If this step isn't done, then the bit offsets used for the lsp_types::SemanticToken are incorrect. +fn sort_tokens(tokens: &mut Vec<(TokenIdent, Token)>) { + tokens.sort_by(|(a_span, _), (b_span, _)| { let a = (a_span.range.start, a_span.range.end); let b = (b_span.range.start, b_span.range.end); a.cmp(&b) }); - Some(semantic_tokens(&tokens_sorted).into()) } //------------------------------- diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index 24e10238759..7de957bb1bb 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -4,7 +4,7 @@ use crate::{ core::{document, session::Session}, error::LanguageServerError, - server_state::{ServerState, Shared, ThreadMessage}, + server_state::{CompilationContext, ServerState, TaskMessage}, }; use lsp_types::{ DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, @@ -27,11 +27,13 @@ pub async fn handle_did_open_text_document( // as the workspace is already compiled. if session.token_map().is_empty() { // send_new_compilation_request(&state, session.clone(), &uri, None); - let _ = state.mpsc_tx.send(ThreadMessage::CompilationData(Shared { - session: Some(session.clone()), - uri: Some(uri.clone()), - version: None, - })); + let _ = state + .cb_tx + .send(TaskMessage::CompilationContext(CompilationContext { + session: Some(session.clone()), + uri: Some(uri.clone()), + version: None, + })); state.is_compiling.store(true, Ordering::SeqCst); eprintln!("did open - waiting for parsing to finish"); @@ -57,18 +59,20 @@ fn send_new_compilation_request( // If channel is full, remove the old value so the compilation // thread only gets the latest value. - if state.mpsc_tx.is_full() { - if let Ok(ThreadMessage::CompilationData(_)) = state.mpsc_rx.try_recv() { + if state.cb_tx.is_full() { + if let Ok(TaskMessage::CompilationContext(_)) = state.cb_rx.try_recv() { //eprintln!("channel is full! 
discarding version: {:?}", res.version); } } //eprintln!("sending new compilation request: version {:?}", version); - let _ = state.mpsc_tx.send(ThreadMessage::CompilationData(Shared { - session: Some(session.clone()), - uri: Some(uri.clone()), - version, - })); + let _ = state + .cb_tx + .send(TaskMessage::CompilationContext(CompilationContext { + session: Some(session.clone()), + uri: Some(uri.clone()), + version, + })); } pub async fn handle_did_change_text_document( diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index ed08df24e8a..3663597f65b 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -29,23 +29,15 @@ pub struct ServerState { pub(crate) sessions: Arc, pub(crate) retrigger_compilation: Arc, pub is_compiling: Arc, - pub(crate) mpsc_tx: Sender, - pub(crate) mpsc_rx: Arc>, + pub(crate) cb_tx: Sender, + pub(crate) cb_rx: Arc>, pub(crate) finished_compilation: Arc, - pub(crate) last_compilation_state: Arc>, -} - -#[derive(Debug)] -pub enum LastCompilationState { - Success, - Failed, - Uninitialized, + last_compilation_state: Arc>, } impl Default for ServerState { fn default() -> Self { - let (mpsc_tx, mpsc_rx) = crossbeam_channel::bounded(1); - + let (cb_tx, cb_rx) = crossbeam_channel::bounded(1); let state = ServerState { client: None, config: Arc::new(RwLock::new(Default::default())), @@ -53,35 +45,50 @@ impl Default for ServerState { sessions: Arc::new(Sessions(DashMap::new())), retrigger_compilation: Arc::new(AtomicBool::new(false)), is_compiling: Arc::new(AtomicBool::new(false)), - mpsc_tx, - mpsc_rx: Arc::new(mpsc_rx), + cb_tx, + cb_rx: Arc::new(cb_rx), finished_compilation: Arc::new(tokio::sync::Notify::new()), last_compilation_state: Arc::new(RwLock::new(LastCompilationState::Uninitialized)), }; - + // Spawn a new thread dedicated to handling compilation tasks state.spawn_compilation_thread(); state } } +/// `LastCompilationState` represents the state of the last compilation process. +/// It's primarily used for debugging purposes. #[derive(Debug)] -pub enum ThreadMessage { - CompilationData(Shared), +enum LastCompilationState { + Success, + Failed, + Uninitialized, +} + +/// `TaskMessage` represents the set of messages or commands that can be sent to and processed by a worker thread in the compilation environment. +#[derive(Debug)] +pub enum TaskMessage { + CompilationContext(CompilationContext), + // A signal to the receiving thread to gracefully terminate its operation. Terminate, } +/// `CompilationContext` encapsulates all the necessary details required by the compilation thread to execute a compilation process. +/// It acts as a container for shared resources and state information relevant to a specific compilation task. #[derive(Debug, Default)] -pub struct Shared { +pub struct CompilationContext { pub session: Option>, pub uri: Option, pub version: Option, } +/// This function is responsible for managing the compilation flags and signaling +/// the completion of the compilation process if there is no pending compilation work. fn update_compilation_state( is_compiling: Arc, retrigger_compilation: Arc, finished_compilation: Arc, - rx: Arc>, + rx: Arc>, ) { //eprintln!("THREAD | update_compilation_state"); @@ -110,21 +117,26 @@ impl ServerState { } } + /// Spawns a new thread dedicated to handling compilation tasks. This thread listens for + /// `TaskMessage` instances sent over a channel and processes them accordingly. 
+ /// + /// This approach allows for asynchronous compilation tasks to be handled in parallel to + /// the main application flow, improving efficiency and responsiveness. pub fn spawn_compilation_thread(&self) { let is_compiling = self.is_compiling.clone(); let retrigger_compilation = self.retrigger_compilation.clone(); let finished_compilation = self.finished_compilation.clone(); - let rx = self.mpsc_rx.clone(); + let rx = self.cb_rx.clone(); let last_compilation_state = self.last_compilation_state.clone(); std::thread::spawn(move || { while let Ok(msg) = rx.recv() { match msg { - ThreadMessage::CompilationData(shared) => { + TaskMessage::CompilationContext(ctx) => { //eprintln!("THREAD | received new compilation request"); - let uri = shared.uri.as_ref().unwrap().clone(); - let version = shared.version; - let session = shared.session.as_ref().unwrap().clone(); + let uri = ctx.uri.as_ref().unwrap().clone(); + let version = ctx.version; + let session = ctx.session.as_ref().unwrap().clone(); let mut engines_clone = session.engines.read().clone(); if let Some(version) = version { @@ -178,7 +190,7 @@ impl ServerState { } //eprintln!("THREAD | finished parsing project: version: {:?}", version); } - ThreadMessage::Terminate => { + TaskMessage::Terminate => { //eprintln!("THREAD | received terminate message"); return; } @@ -197,7 +209,7 @@ impl ServerState { //eprintln!("are we still compiling? | is_compiling = {:?}", self.is_compiling.load(Ordering::SeqCst)); if !self.is_compiling.load(Ordering::SeqCst) { //eprintln!("compilation is finished, lets check if there are pending compilation requests"); - if self.mpsc_rx.is_empty() { + if self.cb_rx.is_empty() { //eprintln!("no pending compilation work, safe to break"); eprintln!( "And the last compilation state was: {:?}", @@ -220,7 +232,7 @@ impl ServerState { tracing::info!("Shutting Down the Sway Language Server"); // Drain pending compilation requests - while let Ok(_) = self.mpsc_rx.try_recv() { + while let Ok(_) = self.cb_rx.try_recv() { //eprintln!("draining pending compilation requests"); } // set the retrigger_compilation flag to true so that the compilation exit early @@ -228,8 +240,8 @@ impl ServerState { self.wait_for_parsing().await; //eprintln!("sending terminate message"); - self.mpsc_tx - .send(ThreadMessage::Terminate) + self.cb_tx + .send(TaskMessage::Terminate) .expect("failed to send terminate message"); //eprintln!("shutting down the sessions"); diff --git a/sway-lsp/tests/fixtures/benchmark/Forc.toml b/sway-lsp/tests/fixtures/benchmark/Forc.toml index aae9e285313..f2f77e69bcc 100644 --- a/sway-lsp/tests/fixtures/benchmark/Forc.toml +++ b/sway-lsp/tests/fixtures/benchmark/Forc.toml @@ -6,4 +6,4 @@ name = "sway_project" implicit-std = false [dependencies] -std = { path = "../../../../sway-lib-std" } \ No newline at end of file +std = { path = "../../../../sway-lib-std" } diff --git a/sway-lsp/tests/utils/Cargo.toml b/sway-lsp/tests/utils/Cargo.toml index b0811cddd92..56552d29f23 100644 --- a/sway-lsp/tests/utils/Cargo.toml +++ b/sway-lsp/tests/utils/Cargo.toml @@ -17,4 +17,4 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0.60" tokio = { version = "1.3", features = ["io-std", "io-util", "macros", "net", "rt-multi-thread", "sync", "time"] } tower = { version = "0.4.12", default-features = false, features = ["util"] } -tower-lsp = { version = "0.20", features = ["proposed"] } \ No newline at end of file +tower-lsp = { version = "0.20", features = ["proposed"] } diff --git a/sway-types/src/source_engine.rs 
b/sway-types/src/source_engine.rs index 65c145f7414..33ad23d5a8a 100644 --- a/sway-types/src/source_engine.rs +++ b/sway-types/src/source_engine.rs @@ -27,32 +27,12 @@ pub struct SourceEngine { impl Clone for SourceEngine { fn clone(&self) -> Self { SourceEngine { - next_source_id: RwLock::new(*self.next_source_id.read().expect("Lock is poisoned")), - path_to_source_map: RwLock::new( - self.path_to_source_map - .read() - .expect("Lock is poisoned") - .clone(), - ), - source_to_path_map: RwLock::new( - self.source_to_path_map - .read() - .expect("Lock is poisoned") - .clone(), - ), - next_module_id: RwLock::new(*self.next_module_id.read().expect("Lock is poisoned")), - path_to_module_map: RwLock::new( - self.path_to_module_map - .read() - .expect("Lock is poisoned") - .clone(), - ), - module_to_sources_map: RwLock::new( - self.module_to_sources_map - .read() - .expect("Lock is poisoned") - .clone(), - ), + next_source_id: RwLock::new(*self.next_source_id.read().unwrap()), + path_to_source_map: RwLock::new(self.path_to_source_map.read().unwrap().clone()), + source_to_path_map: RwLock::new(self.source_to_path_map.read().unwrap().clone()), + next_module_id: RwLock::new(*self.next_module_id.read().unwrap()), + path_to_module_map: RwLock::new(self.path_to_module_map.read().unwrap().clone()), + module_to_sources_map: RwLock::new(self.module_to_sources_map.read().unwrap().clone()), } } } From 6b8c46ea60448665076859b1c2f6126984cc4048 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 5 Jan 2024 10:49:23 +1100 Subject: [PATCH 27/40] add Cargo.lock --- Cargo.lock | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 26f49a97043..ef56e7e84b4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6520,6 +6520,7 @@ dependencies = [ "anyhow", "assert-json-diff", "criterion", + "crossbeam-channel", "dashmap", "dirs 4.0.0", "fd-lock 4.0.1", @@ -6534,6 +6535,7 @@ dependencies = [ "pretty_assertions", "proc-macro2", "quote", + "rand", "rayon", "regex", "ropey", @@ -7268,9 +7270,9 @@ checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" [[package]] name = "tower-lsp" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b38fb0e6ce037835174256518aace3ca621c4f96383c56bb846cfc11b341910" +checksum = "d4ba052b54a6627628d9b3c34c176e7eda8359b7da9acd497b9f20998d118508" dependencies = [ "async-trait", "auto_impl", @@ -7291,13 +7293,13 @@ dependencies = [ [[package]] name = "tower-lsp-macros" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34723c06344244474fdde365b76aebef8050bf6be61a935b91ee9ff7c4e91157" +checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.39", ] [[package]] From f22e5e5f7efa57ae9fa57d6d219063d33efdfea1 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 5 Jan 2024 10:49:35 +1100 Subject: [PATCH 28/40] clippy and rebase --- sway-core/src/lib.rs | 10 ++-------- sway-lsp/benches/lsp_benchmarks/compile.rs | 10 ++++------ sway-lsp/benches/lsp_benchmarks/mod.rs | 2 +- sway-lsp/src/capabilities/semantic_tokens.rs | 2 +- sway-lsp/src/core/session.rs | 6 ++---- sway-lsp/src/handlers/notification.rs | 4 ++-- sway-lsp/src/server_state.rs | 5 ++--- 7 files changed, 14 insertions(+), 25 deletions(-) diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index 9b6c0a88942..469da991469 100644 --- 
a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -619,17 +619,12 @@ pub fn compile_to_ast( if is_parse_module_cache_up_to_date(engines, &path, include_tests) { let mut entry = query_engine .get_programs_cache_entry(&path) - .expect(&format!( - "unable to find entry in cache at path {:?}", - &path - )); + .unwrap_or_else(|| panic!("unable to find entry in cache at path {:?}", &path)); entry.programs.metrics.reused_modules += 1; - + let (warnings, errors) = entry.handler_data; let new_handler = Handler::from_parts(warnings, errors); handler.append(new_handler); - - //eprintln!("re-using cached prgram data, returning from compilation"); return Ok(entry.programs); }; } @@ -688,7 +683,6 @@ pub fn compile_to_ast( if let Some(config) = build_config { let path = config.canonical_root_module(); - let cache_entry = ProgramsCacheEntry { path, programs: programs.clone(), diff --git a/sway-lsp/benches/lsp_benchmarks/compile.rs b/sway-lsp/benches/lsp_benchmarks/compile.rs index e868d9d2d85..070a9674834 100644 --- a/sway-lsp/benches/lsp_benchmarks/compile.rs +++ b/sway-lsp/benches/lsp_benchmarks/compile.rs @@ -11,13 +11,13 @@ fn benchmarks(c: &mut Criterion) { c.bench_function("compile", |b| { b.iter(|| { let engines = Engines::default(); - let _ = black_box(session::compile(&uri, None, &engines, None).unwrap()); + let _ = black_box(session::compile(&uri, &engines, None).unwrap()); }) }); c.bench_function("traverse", |b| { let engines = Engines::default(); - let results = black_box(session::compile(&uri, None, &engines, None).unwrap()); + let results = black_box(session::compile(&uri, &engines, None).unwrap()); b.iter(|| { let _ = black_box(session::traverse(results.clone(), &engines).unwrap()); }) @@ -26,10 +26,8 @@ fn benchmarks(c: &mut Criterion) { c.bench_function("did_change_with_caching", |b| { let engines = Engines::default(); b.iter(|| { - for version in 0..NUM_DID_CHANGE_ITERATIONS { - let _ = black_box( - session::compile(&uri, Some(version as i32), &engines, None).unwrap(), - ); + for _ in 0..NUM_DID_CHANGE_ITERATIONS { + let _ = black_box(session::compile(&uri, &engines, None).unwrap()); } }) }); diff --git a/sway-lsp/benches/lsp_benchmarks/mod.rs b/sway-lsp/benches/lsp_benchmarks/mod.rs index df1f9a281a3..cf4bf9827f0 100644 --- a/sway-lsp/benches/lsp_benchmarks/mod.rs +++ b/sway-lsp/benches/lsp_benchmarks/mod.rs @@ -12,7 +12,7 @@ pub async fn compile_test_project() -> (Url, Arc) { let uri = Url::from_file_path(benchmark_dir().join("src/main.sw")).unwrap(); session.handle_open_file(&uri).await; // Compile the project and write the parse result to the session - let parse_result = session::parse_project(&uri, None, &session.engines.read(), None).unwrap(); + let parse_result = session::parse_project(&uri, &session.engines.read(), None).unwrap(); session.write_parse_result(parse_result); (uri, Arc::new(session)) } diff --git a/sway-lsp/src/capabilities/semantic_tokens.rs b/sway-lsp/src/capabilities/semantic_tokens.rs index 2d348294ff5..06c4c513270 100644 --- a/sway-lsp/src/capabilities/semantic_tokens.rs +++ b/sway-lsp/src/capabilities/semantic_tokens.rs @@ -42,7 +42,7 @@ pub fn semantic_tokens_range( /// Sort tokens by their span so each token is sequential. /// /// If this step isn't done, then the bit offsets used for the lsp_types::SemanticToken are incorrect. 
-fn sort_tokens(tokens: &mut Vec<(TokenIdent, Token)>) { +fn sort_tokens(tokens: &mut [(TokenIdent, Token)]) { tokens.sort_by(|(a_span, _), (b_span, _)| { let a = (a_span.range.start, a_span.range.end); let b = (b_span.range.start, b_span.range.end); diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index 41af7156c16..aa40af40507 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -435,7 +435,6 @@ pub(crate) fn build_plan(uri: &Url) -> Result { pub fn compile( uri: &Url, - version: Option, engines: &Engines, retrigger_compilation: Option>, ) -> Result, Handler)>, LanguageServerError> { @@ -538,11 +537,10 @@ pub fn traverse( /// Parses the project and returns true if the compiler diagnostics are new and should be published. pub fn parse_project( uri: &Url, - version: Option, engines: &Engines, retrigger_compilation: Option>, ) -> Result { - let results = compile(uri, version, engines, retrigger_compilation)?; + let results = compile(uri, engines, retrigger_compilation)?; if results.last().is_none() { //eprintln!("compilation failed, returning"); return Err(LanguageServerError::ProgramsIsNone); @@ -632,7 +630,7 @@ mod tests { let uri = get_url(&dir); let engines = Engines::default(); let result = - parse_project(&uri, None, &engines, None).expect_err("expected ManifestFileNotFound"); + parse_project(&uri, &engines, None).expect_err("expected ManifestFileNotFound"); assert!(matches!( result, LanguageServerError::DocumentError( diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index 7de957bb1bb..ccaaca5555c 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -91,7 +91,7 @@ pub async fn handle_did_change_text_document( .await?; //eprintln!("changes for version {:?} have been written to disk", params.text_document.version); send_new_compilation_request( - &state, + state, session.clone(), &uri, Some(params.text_document.version), @@ -111,7 +111,7 @@ pub(crate) async fn handle_did_save_text_document( .await?; session.sync.resync()?; //eprintln!("resynced"); - send_new_compilation_request(&state, session.clone(), &uri, None); + send_new_compilation_request(state, session.clone(), &uri, None); state.wait_for_parsing().await; state .publish_diagnostics(uri, params.text_document.uri, session) diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 3663597f65b..308c28865dc 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -158,7 +158,6 @@ impl ServerState { //eprintln!("THREAD | starting parsing project: version: {:?}", version); match session::parse_project( &uri, - version, &engines_clone, Some(retrigger_compilation.clone()), ) { @@ -176,7 +175,7 @@ impl ServerState { ); *last_compilation_state.write() = LastCompilationState::Success; } - Err(err) => { + Err(_err) => { //eprintln!("compilation has returned cancelled {:?}", err); update_compilation_state( is_compiling.clone(), @@ -232,7 +231,7 @@ impl ServerState { tracing::info!("Shutting Down the Sway Language Server"); // Drain pending compilation requests - while let Ok(_) = self.cb_rx.try_recv() { + while self.cb_rx.try_recv().is_ok() { //eprintln!("draining pending compilation requests"); } // set the retrigger_compilation flag to true so that the compilation exit early From 093adebbcee935fef83dca5f11545b97a0d26042 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 5 Jan 2024 11:33:17 +1100 Subject: [PATCH 29/40] tidy up check should abort --- 
sway-core/src/lib.rs | 25 ++++++++---------------- sway-error/src/handler.rs | 2 +- sway-lsp/src/server_state.rs | 37 ++++++++++++++++++------------------ 3 files changed, 27 insertions(+), 37 deletions(-) diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index 469da991469..32f81b663a5 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -479,7 +479,7 @@ pub fn parsed_to_ast( package_name, ); - check_should_abort(handler, retrigger_compilation.clone(), 482)?; + check_should_abort(handler, retrigger_compilation.clone())?; let mut typed_program = match typed_program_opt { Ok(typed_program) => typed_program, @@ -529,8 +529,6 @@ pub fn parsed_to_ast( None => (None, None), }; - check_should_abort(handler, retrigger_compilation.clone(), 532)?; - // Perform control flow analysis and extend with any errors. let _ = perform_control_flow_analysis( handler, @@ -540,8 +538,6 @@ pub fn parsed_to_ast( print_graph_url_format, ); - check_should_abort(handler, retrigger_compilation.clone(), 543)?; - // Evaluate const declarations, to allow storage slots initialization with consts. let mut ctx = Context::new(engines.se()); let mut md_mgr = MetadataManager::default(); @@ -607,7 +603,8 @@ pub fn compile_to_ast( package_name: &str, retrigger_compilation: Option>, ) -> Result { - check_should_abort(handler, retrigger_compilation.clone(), 610)?; + check_should_abort(handler, retrigger_compilation.clone())?; + let query_engine = engines.qe(); let mut metrics = PerformanceData::default(); @@ -621,7 +618,7 @@ pub fn compile_to_ast( .get_programs_cache_entry(&path) .unwrap_or_else(|| panic!("unable to find entry in cache at path {:?}", &path)); entry.programs.metrics.reused_modules += 1; - + let (warnings, errors) = entry.handler_data; let new_handler = Handler::from_parts(warnings, errors); handler.append(new_handler); @@ -629,8 +626,6 @@ pub fn compile_to_ast( }; } - check_should_abort(handler, retrigger_compilation.clone(), 632)?; - // Parse the program to a concrete syntax tree (CST). let parse_program_opt = time_expr!( "parse the program to a concrete syntax tree (CST)", @@ -640,7 +635,7 @@ pub fn compile_to_ast( metrics ); - check_should_abort(handler, retrigger_compilation.clone(), 643)?; + check_should_abort(handler, retrigger_compilation.clone())?; let (lexed_program, mut parsed_program) = match parse_program_opt { Ok(modules) => modules, @@ -675,7 +670,7 @@ pub fn compile_to_ast( metrics ); - check_should_abort(handler, retrigger_compilation.clone(), 678)?; + check_should_abort(handler, retrigger_compilation.clone())?; handler.dedup(); @@ -961,20 +956,16 @@ fn module_return_path_analysis( } } +/// Check if the retrigger compilation flag has been set to true in the language server. +/// If it has, there is a new compilation request, so we should abort the current compilation. 
fn check_should_abort( handler: &Handler, retrigger_compilation: Option>, - line: u32, ) -> Result<(), ErrorEmitted> { if let Some(ref retrigger_compilation) = retrigger_compilation { if retrigger_compilation.load(Ordering::SeqCst) { - eprintln!("Aborting compilation due to retrigger as line {}.", line); return Err(handler.cancel()); - } else { - //eprintln!("Continuing compilation at line {}.", line); } - } else { - //eprintln!("retrigger_compilation is None at line {}.", line); } Ok(()) } diff --git a/sway-error/src/handler.rs b/sway-error/src/handler.rs index 4420ad6f53f..e60adaf2abb 100644 --- a/sway-error/src/handler.rs +++ b/sway-error/src/handler.rs @@ -34,7 +34,7 @@ impl Handler { ErrorEmitted { _priv: () } } - // todo: decide what to return here + // Compilation should be cancelled. pub fn cancel(&self) -> ErrorEmitted { ErrorEmitted { _priv: () } } diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 308c28865dc..3f890c3e545 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -19,6 +19,7 @@ use std::{ Arc, }, }; +use tokio::sync::Notify; use tower_lsp::{jsonrpc, Client}; /// `ServerState` is the primary mutable state of the language server @@ -31,7 +32,7 @@ pub struct ServerState { pub is_compiling: Arc, pub(crate) cb_tx: Sender, pub(crate) cb_rx: Arc>, - pub(crate) finished_compilation: Arc, + pub(crate) finished_compilation: Arc, last_compilation_state: Arc>, } @@ -47,7 +48,7 @@ impl Default for ServerState { is_compiling: Arc::new(AtomicBool::new(false)), cb_tx, cb_rx: Arc::new(cb_rx), - finished_compilation: Arc::new(tokio::sync::Notify::new()), + finished_compilation: Arc::new(Notify::new()), last_compilation_state: Arc::new(RwLock::new(LastCompilationState::Uninitialized)), }; // Spawn a new thread dedicated to handling compilation tasks @@ -57,8 +58,7 @@ impl Default for ServerState { } /// `LastCompilationState` represents the state of the last compilation process. -/// It's primarily used for debugging purposes. -#[derive(Debug)] +#[derive(Debug, PartialEq)] enum LastCompilationState { Success, Failed, @@ -84,10 +84,11 @@ pub struct CompilationContext { /// This function is responsible for managing the compilation flags and signaling /// the completion of the compilation process if there is no pending compilation work. +/// Tokio's [Notify] is used to notify waiters that the compilation process has finished. fn update_compilation_state( is_compiling: Arc, retrigger_compilation: Arc, - finished_compilation: Arc, + finished_compilation: Arc, rx: Arc>, ) { //eprintln!("THREAD | update_compilation_state"); @@ -142,7 +143,6 @@ impl ServerState { if let Some(version) = version { // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. if version % 10 == 0 { - eprintln!("Garbage collecting"); // Call this on the engines clone so we don't clear types that are still in use // and might be needed in the case cancel compilation was triggered. 
if let Err(err) = session.garbage_collect(&mut engines_clone) { @@ -167,30 +167,29 @@ impl ServerState { //eprintln!("THREAD | success, about to write parse results: {:?}", version); session.write_parse_result(parse_result); //eprintln!("THREAD | finished writing parse results: {:?}", version); - update_compilation_state( - is_compiling.clone(), - retrigger_compilation.clone(), - finished_compilation.clone(), - rx.clone(), - ); *last_compilation_state.write() = LastCompilationState::Success; } Err(_err) => { //eprintln!("compilation has returned cancelled {:?}", err); - update_compilation_state( - is_compiling.clone(), - retrigger_compilation.clone(), - finished_compilation.clone(), - rx.clone(), - ); *last_compilation_state.write() = LastCompilationState::Failed; - continue; } } + + update_compilation_state( + is_compiling.clone(), + retrigger_compilation.clone(), + finished_compilation.clone(), + rx.clone(), + ); + if *last_compilation_state.read() == LastCompilationState::Failed { + continue; + } //eprintln!("THREAD | finished parsing project: version: {:?}", version); } TaskMessage::Terminate => { //eprintln!("THREAD | received terminate message"); + + // If we receive a terminate message, we need to exit the thread return; } } From 9b68c993182d457c5b169ab2f3e373627f7fca70 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 5 Jan 2024 12:16:00 +1100 Subject: [PATCH 30/40] timings wip --- sway-lsp/src/core/session.rs | 1 - sway-lsp/tests/lib.rs | 19 ++++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index aa40af40507..075fa397771 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -302,7 +302,6 @@ impl Session { path: uri.path().to_string(), err: err.to_string(), })?; - Ok(()) } diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index 69a73d02207..8cbaf8d9dd1 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -114,19 +114,20 @@ async fn did_cache_test() { } #[tokio::test] -#[allow(dead_code)] async fn did_change_stress_test() { let (mut service, _) = LspService::build(ServerState::new) .custom_method("sway/metrics", ServerState::metrics) .finish(); let bench_dir = sway_workspace_dir().join("sway-lsp/tests/fixtures/benchmark"); let uri = init_and_open(&mut service, bench_dir.join("src/main.sw")).await; - let times = 4000; + let times = 400; for version in 0..times { + let now = std::time::Instant::now(); let _ = lsp::did_change_request(&mut service, &uri, version + 1).await; if version == 0 { service.inner().wait_for_parsing().await; } + eprintln!("did_change took {:?}", now.elapsed()); let metrics = lsp::metrics_request(&mut service, &uri).await; for (path, metrics) in metrics { if path.contains("sway-lib-core") || path.contains("sway-lib-std") { @@ -134,13 +135,13 @@ async fn did_change_stress_test() { } } - if rand::random::() < 230 { - let random_duration = rand::random::() as u64 % 10; - std::thread::sleep(std::time::Duration::from_millis(random_duration)); - } else { - let random_duration = rand::random::() % 3000; - std::thread::sleep(std::time::Duration::from_millis(random_duration)); - } + // if rand::random::() < 230 { + // let random_duration = rand::random::() as u64 % 10; + // std::thread::sleep(std::time::Duration::from_millis(random_duration)); + // } else { + // let random_duration = rand::random::() % 3000; + // std::thread::sleep(std::time::Duration::from_millis(random_duration)); + // } } shutdown_and_exit(&mut 
service).await; } From 43252bd84cd97d4eda96f648195f3b98a9d43e95 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 5 Jan 2024 13:14:39 +1100 Subject: [PATCH 31/40] about to remove printlns --- sway-core/src/lib.rs | 2 ++ sway-lsp/Cargo.toml | 1 - sway-lsp/tests/lib.rs | 10 ---------- 3 files changed, 2 insertions(+), 11 deletions(-) diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index 32f81b663a5..c6c53039366 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -529,6 +529,8 @@ pub fn parsed_to_ast( None => (None, None), }; + check_should_abort(handler, retrigger_compilation.clone())?; + // Perform control flow analysis and extend with any errors. let _ = perform_control_flow_analysis( handler, diff --git a/sway-lsp/Cargo.toml b/sway-lsp/Cargo.toml index bdfd49f2b0a..d444d0ccdc7 100644 --- a/sway-lsp/Cargo.toml +++ b/sway-lsp/Cargo.toml @@ -60,7 +60,6 @@ futures = { version = "0.3", default-features = false, features = [ "async-await", ] } pretty_assertions = "1.4.0" -rand = "0.8" regex = "^1.10.2" sway-lsp-test-utils = { path = "tests/utils" } tikv-jemallocator = "0.5" diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index 8cbaf8d9dd1..1acc9708371 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -122,26 +122,16 @@ async fn did_change_stress_test() { let uri = init_and_open(&mut service, bench_dir.join("src/main.sw")).await; let times = 400; for version in 0..times { - let now = std::time::Instant::now(); let _ = lsp::did_change_request(&mut service, &uri, version + 1).await; if version == 0 { service.inner().wait_for_parsing().await; } - eprintln!("did_change took {:?}", now.elapsed()); let metrics = lsp::metrics_request(&mut service, &uri).await; for (path, metrics) in metrics { if path.contains("sway-lib-core") || path.contains("sway-lib-std") { assert!(metrics.reused_modules >= 1); } } - - // if rand::random::() < 230 { - // let random_duration = rand::random::() as u64 % 10; - // std::thread::sleep(std::time::Duration::from_millis(random_duration)); - // } else { - // let random_duration = rand::random::() % 3000; - // std::thread::sleep(std::time::Duration::from_millis(random_duration)); - // } } shutdown_and_exit(&mut service).await; } From b0a91be557ff139fb6c1f16d1d16669084c05794 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 5 Jan 2024 13:33:24 +1100 Subject: [PATCH 32/40] final clean up --- sway-lsp/src/core/session.rs | 7 --- sway-lsp/src/handlers/notification.rs | 20 ++----- sway-lsp/src/handlers/request.rs | 1 - sway-lsp/src/server_state.rs | 82 ++++++--------------------- 4 files changed, 24 insertions(+), 86 deletions(-) diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index 075fa397771..d80b3c01069 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -149,7 +149,6 @@ impl Session { self.runnables.clear(); self.metrics.clear(); - //eprintln!("THREAD | success, about to token map"); res.token_map.deref().iter().for_each(|item| { let (i, t) = item.pair(); self.token_map.insert(i.clone(), t.clone()); @@ -161,7 +160,6 @@ impl Session { }); let (errors, warnings) = res.diagnostics; - //eprintln!("THREAD | success, about to write diagnostics"); *self.diagnostics.write() = capabilities::diagnostic::get_diagnostics(&warnings, &errors, self.engines.read().se()); @@ -170,7 +168,6 @@ impl Session { self.engines.read().de(), self.engines.read().se(), ); - //eprintln!("THREAD | success, about to write programs"); self.compiled_program.write().lexed = Some(res.lexed); 
self.compiled_program.write().parsed = Some(res.parsed); self.compiled_program.write().typed = Some(res.typed); @@ -541,10 +538,7 @@ pub fn parse_project( ) -> Result { let results = compile(uri, engines, retrigger_compilation)?; if results.last().is_none() { - //eprintln!("compilation failed, returning"); return Err(LanguageServerError::ProgramsIsNone); - } else { - //eprintln!("compilation successful, starting traversal"); } let TraversalResult { diagnostics, @@ -552,7 +546,6 @@ pub fn parse_project( token_map, metrics, } = traverse(results, engines)?; - //eprintln!("traversal successful"); let (lexed, parsed, typed) = programs.ok_or(LanguageServerError::ProgramsIsNone)?; Ok(ParseResult { diagnostics, diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index ccaaca5555c..6527632fbd3 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -16,7 +16,6 @@ pub async fn handle_did_open_text_document( state: &ServerState, params: DidOpenTextDocumentParams, ) -> Result<(), LanguageServerError> { - eprintln!("did_open_text_document"); let (uri, session) = state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) @@ -36,7 +35,6 @@ pub async fn handle_did_open_text_document( })); state.is_compiling.store(true, Ordering::SeqCst); - eprintln!("did open - waiting for parsing to finish"); state.wait_for_parsing().await; state .publish_diagnostics(uri, params.text_document.uri, session) @@ -51,21 +49,20 @@ fn send_new_compilation_request( uri: &Url, version: Option, ) { - //eprintln!("new compilation request: version {:?} - setting is_compiling to true", version); if state.is_compiling.load(Ordering::SeqCst) { - // eprintln!("retrigger compilation!"); + // If we are already compiling, then we need to retrigger compilation state.retrigger_compilation.store(true, Ordering::SeqCst); } - // If channel is full, remove the old value so the compilation - // thread only gets the latest value. + // Check if the channel is full. If it is, we want to ensure that the compilation + // thread receives only the most recent value. if state.cb_tx.is_full() { - if let Ok(TaskMessage::CompilationContext(_)) = state.cb_rx.try_recv() { - //eprintln!("channel is full! discarding version: {:?}", res.version); + while let Ok(TaskMessage::CompilationContext(_)) = state.cb_rx.try_recv() { + // Loop will continue to remove `CompilationContext` messages + // until the channel has no more of them. 
} } - //eprintln!("sending new compilation request: version {:?}", version); let _ = state .cb_tx .send(TaskMessage::CompilationContext(CompilationContext { @@ -79,17 +76,14 @@ pub async fn handle_did_change_text_document( state: &ServerState, params: DidChangeTextDocumentParams, ) -> Result<(), LanguageServerError> { - //eprintln!("did change text document: version: {:?}", params.text_document.version); document::mark_file_as_dirty(¶ms.text_document.uri).await?; let (uri, session) = state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) .await?; - //eprintln!("writing changes to file for version: {:?}", params.text_document.version); session .write_changes_to_file(&uri, params.content_changes) .await?; - //eprintln!("changes for version {:?} have been written to disk", params.text_document.version); send_new_compilation_request( state, session.clone(), @@ -103,14 +97,12 @@ pub(crate) async fn handle_did_save_text_document( state: &ServerState, params: DidSaveTextDocumentParams, ) -> Result<(), LanguageServerError> { - //eprintln!("did save text document"); document::remove_dirty_flag(¶ms.text_document.uri).await?; let (uri, session) = state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) .await?; session.sync.resync()?; - //eprintln!("resynced"); send_new_compilation_request(state, session.clone(), &uri, None); state.wait_for_parsing().await; state diff --git a/sway-lsp/src/handlers/request.rs b/sway-lsp/src/handlers/request.rs index 29d24b180b0..f0c212fce2c 100644 --- a/sway-lsp/src/handlers/request.rs +++ b/sway-lsp/src/handlers/request.rs @@ -257,7 +257,6 @@ pub async fn handle_code_lens( state: &ServerState, params: lsp_types::CodeLensParams, ) -> Result>> { - eprintln!("code_lens"); let _ = state.wait_for_parsing().await; match state .sessions diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 3f890c3e545..4f436e46350 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -82,34 +82,6 @@ pub struct CompilationContext { pub version: Option, } -/// This function is responsible for managing the compilation flags and signaling -/// the completion of the compilation process if there is no pending compilation work. -/// Tokio's [Notify] is used to notify waiters that the compilation process has finished. 
-fn update_compilation_state( - is_compiling: Arc, - retrigger_compilation: Arc, - finished_compilation: Arc, - rx: Arc>, -) { - //eprintln!("THREAD | update_compilation_state"); - - is_compiling.store(false, Ordering::SeqCst); - //eprintln!("THREAD | is_compiling = {:?}", is_compiling.load(Ordering::SeqCst)); - - retrigger_compilation.store(false, Ordering::SeqCst); - //eprintln!("THREAD | retrigger_compilation = {:?}", retrigger_compilation.load(Ordering::SeqCst)); - - // Make sure there isn't any pending compilation work - if rx.is_empty() { - //eprintln!("THREAD | no pending compilation work, safe to set is_compiling to false"); - - //eprintln!("THREAD | finished compilation, notifying waiters"); - finished_compilation.notify_waiters(); - } else { - //eprintln!("THREAD | there is pending compilation work"); - } -} - impl ServerState { pub fn new(client: Client) -> ServerState { ServerState { @@ -133,14 +105,11 @@ impl ServerState { while let Ok(msg) = rx.recv() { match msg { TaskMessage::CompilationContext(ctx) => { - //eprintln!("THREAD | received new compilation request"); - let uri = ctx.uri.as_ref().unwrap().clone(); - let version = ctx.version; let session = ctx.session.as_ref().unwrap().clone(); let mut engines_clone = session.engines.read().clone(); - if let Some(version) = version { + if let Some(version) = ctx.version { // Garbage collection is fairly expsensive so we only clear on every 10th keystroke. if version % 10 == 0 { // Call this on the engines clone so we don't clear types that are still in use @@ -154,41 +123,38 @@ impl ServerState { } } + // Set the is_compiling flag to true so that the wait_for_parsing function knows that we are compiling is_compiling.store(true, Ordering::SeqCst); - //eprintln!("THREAD | starting parsing project: version: {:?}", version); match session::parse_project( &uri, &engines_clone, Some(retrigger_compilation.clone()), ) { Ok(parse_result) => { - //eprintln!("THREAD | engines_write: {:?}", version); *session.engines.write() = engines_clone; - //eprintln!("THREAD | success, about to write parse results: {:?}", version); session.write_parse_result(parse_result); - //eprintln!("THREAD | finished writing parse results: {:?}", version); *last_compilation_state.write() = LastCompilationState::Success; } Err(_err) => { - //eprintln!("compilation has returned cancelled {:?}", err); *last_compilation_state.write() = LastCompilationState::Failed; } } - update_compilation_state( - is_compiling.clone(), - retrigger_compilation.clone(), - finished_compilation.clone(), - rx.clone(), - ); + // Reset the flags to false + is_compiling.store(false, Ordering::SeqCst); + retrigger_compilation.store(false, Ordering::SeqCst); + + // Make sure there isn't any pending compilation work + if rx.is_empty() { + // finished compilation, notify waiters + finished_compilation.notify_waiters(); + } + if *last_compilation_state.read() == LastCompilationState::Failed { continue; } - //eprintln!("THREAD | finished parsing project: version: {:?}", version); } TaskMessage::Terminate => { - //eprintln!("THREAD | received terminate message"); - // If we receive a terminate message, we need to exit the thread return; } @@ -204,25 +170,15 @@ impl ServerState { /// this process until `is_compiling` becomes false. pub async fn wait_for_parsing(&self) { loop { - //eprintln!("are we still compiling? 
| is_compiling = {:?}", self.is_compiling.load(Ordering::SeqCst)); if !self.is_compiling.load(Ordering::SeqCst) { - //eprintln!("compilation is finished, lets check if there are pending compilation requests"); + // compilation is finished, lets check if there are pending compilation requests. if self.cb_rx.is_empty() { - //eprintln!("no pending compilation work, safe to break"); - eprintln!( - "And the last compilation state was: {:?}", - &self.last_compilation_state.read() - ); - + // no pending compilation work, safe to break. break; - } else { - //eprintln!("there is pending compilation work, lets wait for it to finish"); } - } else { - //eprintln!("we are still compiling, lets wait to be notified"); } + // We are still compiling, lets wait to be notified. self.finished_compilation.notified().await; - //eprintln!("we were notified, lets check if we are still compiling"); } } @@ -230,19 +186,17 @@ impl ServerState { tracing::info!("Shutting Down the Sway Language Server"); // Drain pending compilation requests - while self.cb_rx.try_recv().is_ok() { - //eprintln!("draining pending compilation requests"); - } + while self.cb_rx.try_recv().is_ok() {} + // set the retrigger_compilation flag to true so that the compilation exit early self.retrigger_compilation.store(true, Ordering::SeqCst); self.wait_for_parsing().await; - //eprintln!("sending terminate message"); + // Send a terminate message to the compilation thread self.cb_tx .send(TaskMessage::Terminate) .expect("failed to send terminate message"); - //eprintln!("shutting down the sessions"); let _ = self.sessions.iter().map(|item| { let session = item.value(); session.shutdown(); From d14665d4eea2a1b847a02e6ff821b58c255b0a08 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Fri, 5 Jan 2024 14:15:06 +1100 Subject: [PATCH 33/40] wait for parsing to finish in didChange test --- sway-lsp/tests/lib.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index 1acc9708371..b446314fb28 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -92,6 +92,7 @@ async fn did_change() { let (mut service, _) = LspService::new(ServerState::new); let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await; let _ = lsp::did_change_request(&mut service, &uri, 1).await; + service.inner().wait_for_parsing().await; shutdown_and_exit(&mut service).await; } @@ -113,7 +114,8 @@ async fn did_cache_test() { shutdown_and_exit(&mut service).await; } -#[tokio::test] +// #[tokio::test] +#[allow(dead_code)] async fn did_change_stress_test() { let (mut service, _) = LspService::build(ServerState::new) .custom_method("sway/metrics", ServerState::metrics) From c5c45fe9b422dae3083e79b568f22538206a84bc Mon Sep 17 00:00:00 2001 From: Joshua Batty Date: Mon, 8 Jan 2024 07:57:56 +1100 Subject: [PATCH 34/40] Grammar MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: João Matos --- sway-lsp/src/server_state.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 4f436e46350..5893fbef533 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -188,7 +188,7 @@ impl ServerState { // Drain pending compilation requests while self.cb_rx.try_recv().is_ok() {} - // set the retrigger_compilation flag to true so that the compilation exit early + // Set the retrigger_compilation flag to true so that the compilation exits early 
self.retrigger_compilation.store(true, Ordering::SeqCst); self.wait_for_parsing().await; From 95022ee0f95a4f97e48844c00ceb6f5ff7929fc4 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Mon, 8 Jan 2024 10:14:56 +1100 Subject: [PATCH 35/40] feedback --- sway-core/src/lib.rs | 4 +--- sway-lsp/src/handlers/notification.rs | 1 - sway-lsp/src/server_state.rs | 3 ++- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index c6c53039366..718097127ba 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -616,9 +616,7 @@ pub fn compile_to_ast( // Check if we can re-use the data in the cache. if is_parse_module_cache_up_to_date(engines, &path, include_tests) { - let mut entry = query_engine - .get_programs_cache_entry(&path) - .unwrap_or_else(|| panic!("unable to find entry in cache at path {:?}", &path)); + let mut entry = query_engine.get_programs_cache_entry(&path).unwrap(); entry.programs.metrics.reused_modules += 1; let (warnings, errors) = entry.handler_data; diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index 6527632fbd3..087bc501926 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -25,7 +25,6 @@ pub async fn handle_did_open_text_document( // Otherwise, don't recompile the project when a new file in the project is opened // as the workspace is already compiled. if session.token_map().is_empty() { - // send_new_compilation_request(&state, session.clone(), &uri, None); let _ = state .cb_tx .send(TaskMessage::CompilationContext(CompilationContext { diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 5893fbef533..e22b3d5679a 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -13,6 +13,7 @@ use forc_pkg::PackageManifestFile; use lsp_types::{Diagnostic, Url}; use parking_lot::RwLock; use std::{ + mem, path::PathBuf, sync::{ atomic::{AtomicBool, Ordering}, @@ -131,7 +132,7 @@ impl ServerState { Some(retrigger_compilation.clone()), ) { Ok(parse_result) => { - *session.engines.write() = engines_clone; + mem::swap(&mut *session.engines.write(), &mut engines_clone); session.write_parse_result(parse_result); *last_compilation_state.write() = LastCompilationState::Success; } From 1c9fed440fb54e893d5323f55003fb71be752c9b Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Mon, 8 Jan 2024 10:15:15 +1100 Subject: [PATCH 36/40] Cargo.lock --- Cargo.lock | 1 - 1 file changed, 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index ef56e7e84b4..34ae858c349 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6535,7 +6535,6 @@ dependencies = [ "pretty_assertions", "proc-macro2", "quote", - "rand", "rayon", "regex", "ropey", From 61e652547b8c10c3d487a3fdf6032652591191ae Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Mon, 8 Jan 2024 11:02:07 +1100 Subject: [PATCH 37/40] mem swap the compiled programs from ParseResult --- sway-lsp/src/core/session.rs | 59 ++++++++++++++++++++++-------------- sway-lsp/src/server_state.rs | 8 +++-- 2 files changed, 41 insertions(+), 26 deletions(-) diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index d80b3c01069..11b7dc8c573 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -61,12 +61,21 @@ pub struct CompiledProgram { pub struct ParseResult { pub(crate) diagnostics: (Vec, Vec), pub(crate) token_map: TokenMap, - pub(crate) lexed: LexedProgram, - pub(crate) parsed: ParseProgram, - pub(crate) typed: ty::TyProgram, + pub(crate) 
compiled_program: CompiledProgram, pub(crate) metrics: DashMap, } +impl Default for ParseResult { + fn default() -> Self { + ParseResult { + diagnostics: Default::default(), + token_map: Default::default(), + compiled_program: Default::default(), + metrics: Default::default(), + } + } +} + /// A `Session` is used to store information about a single member in a workspace. /// It stores the parsed and typed Tokens, as well as the [TypeEngine] associated with the project. /// @@ -144,7 +153,8 @@ impl Session { /// Write the result of parsing to the session. /// This function should only be called after successfully parsing. - pub fn write_parse_result(&self, res: ParseResult) { + pub fn write_parse_result(&self, res: &mut ParseResult) { + let now = std::time::Instant::now(); self.token_map.clear(); self.runnables.clear(); self.metrics.clear(); @@ -159,18 +169,19 @@ impl Session { self.metrics.insert(*s, t.clone()); }); - let (errors, warnings) = res.diagnostics; + let (errors, warnings) = &res.diagnostics; *self.diagnostics.write() = capabilities::diagnostic::get_diagnostics(&warnings, &errors, self.engines.read().se()); - self.create_runnables( - &res.typed, - self.engines.read().de(), - self.engines.read().se(), + if let Some(typed) = &res.compiled_program.typed { + self.create_runnables(typed, self.engines.read().de(), self.engines.read().se()); + } + std::mem::swap( + &mut *self.compiled_program.write(), + &mut res.compiled_program, ); - self.compiled_program.write().lexed = Some(res.lexed); - self.compiled_program.write().parsed = Some(res.parsed); - self.compiled_program.write().typed = Some(res.typed); + + eprintln!("write_parse_result took {:?}", now.elapsed()); } pub fn token_ranges(&self, url: &Url, position: Position) -> Option> { @@ -535,7 +546,8 @@ pub fn parse_project( uri: &Url, engines: &Engines, retrigger_compilation: Option>, -) -> Result { + parse_result: &mut ParseResult, +) -> Result<(), LanguageServerError> { let results = compile(uri, engines, retrigger_compilation)?; if results.last().is_none() { return Err(LanguageServerError::ProgramsIsNone); @@ -547,14 +559,14 @@ pub fn parse_project( metrics, } = traverse(results, engines)?; let (lexed, parsed, typed) = programs.ok_or(LanguageServerError::ProgramsIsNone)?; - Ok(ParseResult { - diagnostics, - token_map, - lexed, - parsed, - typed, - metrics, - }) + + parse_result.diagnostics = diagnostics; + parse_result.token_map = token_map; + parse_result.compiled_program.lexed = Some(lexed); + parse_result.compiled_program.parsed = Some(parsed); + parse_result.compiled_program.typed = Some(typed); + parse_result.metrics = metrics; + Ok(()) } /// Parse the [ParseProgram] AST to populate the [TokenMap] with parsed AST nodes. 
@@ -621,8 +633,9 @@ mod tests { let dir = get_absolute_path("sway-lsp/tests/fixtures"); let uri = get_url(&dir); let engines = Engines::default(); - let result = - parse_project(&uri, &engines, None).expect_err("expected ManifestFileNotFound"); + let parse_result = &mut ParseResult::default(); + let result = parse_project(&uri, &engines, None, parse_result) + .expect_err("expected ManifestFileNotFound"); assert!(matches!( result, LanguageServerError::DocumentError( diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index e22b3d5679a..a58cf65bee9 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -2,7 +2,7 @@ use crate::{ config::{Config, Warnings}, - core::session::{self, Session}, + core::session::{self, ParseResult, Session}, error::{DirectoryError, DocumentError, LanguageServerError}, utils::debug, utils::keyword_docs::KeywordDocs, @@ -126,14 +126,16 @@ impl ServerState { // Set the is_compiling flag to true so that the wait_for_parsing function knows that we are compiling is_compiling.store(true, Ordering::SeqCst); + let mut parse_result = ParseResult::default(); match session::parse_project( &uri, &engines_clone, Some(retrigger_compilation.clone()), + &mut parse_result, ) { - Ok(parse_result) => { + Ok(_) => { mem::swap(&mut *session.engines.write(), &mut engines_clone); - session.write_parse_result(parse_result); + session.write_parse_result(&mut parse_result); *last_compilation_state.write() = LastCompilationState::Success; } Err(_err) => { From 643697f20d2fe4e65e950b3338a96e200425d85d Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Mon, 8 Jan 2024 11:10:26 +1100 Subject: [PATCH 38/40] update benchmark code --- sway-lsp/benches/lsp_benchmarks/mod.rs | 7 ++++--- sway-lsp/src/core/session.rs | 3 --- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/sway-lsp/benches/lsp_benchmarks/mod.rs b/sway-lsp/benches/lsp_benchmarks/mod.rs index cf4bf9827f0..d63feb507f2 100644 --- a/sway-lsp/benches/lsp_benchmarks/mod.rs +++ b/sway-lsp/benches/lsp_benchmarks/mod.rs @@ -4,7 +4,7 @@ pub mod token_map; use lsp_types::Url; use std::{path::PathBuf, sync::Arc}; -use sway_lsp::core::session::{self, Session}; +use sway_lsp::core::session::{self, ParseResult, Session}; pub async fn compile_test_project() -> (Url, Arc) { let session = Session::new(); @@ -12,8 +12,9 @@ pub async fn compile_test_project() -> (Url, Arc) { let uri = Url::from_file_path(benchmark_dir().join("src/main.sw")).unwrap(); session.handle_open_file(&uri).await; // Compile the project and write the parse result to the session - let parse_result = session::parse_project(&uri, &session.engines.read(), None).unwrap(); - session.write_parse_result(parse_result); + let mut parse_result = ParseResult::default(); + session::parse_project(&uri, &session.engines.read(), None, &mut parse_result).unwrap(); + session.write_parse_result(&mut parse_result); (uri, Arc::new(session)) } diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index 11b7dc8c573..ae23a510802 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -154,7 +154,6 @@ impl Session { /// Write the result of parsing to the session. /// This function should only be called after successfully parsing. 
pub fn write_parse_result(&self, res: &mut ParseResult) { - let now = std::time::Instant::now(); self.token_map.clear(); self.runnables.clear(); self.metrics.clear(); @@ -180,8 +179,6 @@ impl Session { &mut *self.compiled_program.write(), &mut res.compiled_program, ); - - eprintln!("write_parse_result took {:?}", now.elapsed()); } pub fn token_ranges(&self, url: &Url, position: Position) -> Option> { From fcd0b374721345490a914754661638113be7cf4f Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Mon, 8 Jan 2024 11:15:46 +1100 Subject: [PATCH 39/40] remove unnecessary continue statement --- sway-lsp/src/server_state.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index a58cf65bee9..25320a2dec4 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -59,6 +59,7 @@ impl Default for ServerState { } /// `LastCompilationState` represents the state of the last compilation process. +/// It is primarily used for debugging purposes. #[derive(Debug, PartialEq)] enum LastCompilationState { Success, @@ -152,10 +153,6 @@ impl ServerState { // finished compilation, notify waiters finished_compilation.notify_waiters(); } - - if *last_compilation_state.read() == LastCompilationState::Failed { - continue; - } } TaskMessage::Terminate => { // If we receive a terminate message, we need to exit the thread From 2cf3c5d1f6dcee0992b92a7755e45d584ad36559 Mon Sep 17 00:00:00 2001 From: JoshuaBatty Date: Mon, 8 Jan 2024 11:21:09 +1100 Subject: [PATCH 40/40] clippy --- sway-lsp/src/core/session.rs | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index ae23a510802..7b43706ca0b 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -57,7 +57,7 @@ pub struct CompiledProgram { /// Used to write the result of compiling into so we can update /// the types in [Session] after successfully parsing. -#[derive(Debug)] +#[derive(Debug, Default)] pub struct ParseResult { pub(crate) diagnostics: (Vec, Vec), pub(crate) token_map: TokenMap, @@ -65,17 +65,6 @@ pub struct ParseResult { pub(crate) metrics: DashMap, } -impl Default for ParseResult { - fn default() -> Self { - ParseResult { - diagnostics: Default::default(), - token_map: Default::default(), - compiled_program: Default::default(), - metrics: Default::default(), - } - } -} - /// A `Session` is used to store information about a single member in a workspace. /// It stores the parsed and typed Tokens, as well as the [TypeEngine] associated with the project. /// @@ -170,7 +159,7 @@ impl Session { let (errors, warnings) = &res.diagnostics; *self.diagnostics.write() = - capabilities::diagnostic::get_diagnostics(&warnings, &errors, self.engines.read().se()); + capabilities::diagnostic::get_diagnostics(warnings, errors, self.engines.read().se()); if let Some(typed) = &res.compiled_program.typed { self.create_runnables(typed, self.engines.read().de(), self.engines.read().se());