From 886c0b8c1bed62f17d64246013724d0ec6a77177 Mon Sep 17 00:00:00 2001 From: Joshua Batty Date: Fri, 16 Aug 2024 08:05:57 +1000 Subject: [PATCH] Implement Typed Module Caching (#6280) ## Description This PR implements a typed module caching system for the compiler when interacted with from the language server. The main goal is to improve performance by caching typed modules and reusing them when possible, reducing unnecessary recompilation. ## Key Changes - Introduced `CowCache` for efficient, thread-safe caching with copy-on-write semantics - Implemented typed module caching in the `QueryEngine` - Updated `ServerState` and `Session` to utilize the new caching system ## Performance Improvements These changes show promising performance improvements in processing didChange events, particularly for larger projects. Here are some benchmarks using the FUSD libraries Sway project (19 Sway files): | Build | Before | After | Improvement | |-------|--------|-------|-------------| | Debug | 448.25ms | 123.73ms | 72.4% faster | | Release | 103.83ms | 53.59ms | 48.4% faster | These improvements should lead to a more responsive development experience, especially when making frequent small changes. closes #6228 improves: https://github.com/FuelLabs/sway-vscode-plugin/issues/172 ## Checklist - [x] I have linked to any relevant issues. - [x] I have commented my code, particularly in hard-to-understand areas. - [x] I have updated the documentation where relevant (API docs, the reference, and the Sway book). - [x] If my change requires substantial documentation changes, I have [requested support from the DevRel team](https://github.com/FuelLabs/devrel-requests/issues/new/choose) - [x] I have added tests that prove my fix is effective or that my feature works. - [x] I have added (or requested a maintainer to add) the necessary `Breaking*` or `New Feature` labels where relevant. 
- [x] I have done my best to ensure that my PR adheres to [the Fuel Labs Code Review Standards](https://github.com/FuelLabs/rfcs/blob/master/text/code-standards/external-contributors.md). - [x] I have requested a review from the relevant team or maintainers. --- sway-core/src/decl_engine/engine.rs | 49 +++- sway-core/src/decl_engine/parsed_engine.rs | 42 ++- sway-core/src/engine_threading.rs | 8 + sway-core/src/language/ty/module.rs | 2 +- sway-core/src/language/ty/program.rs | 4 +- sway-core/src/lib.rs | 143 ++++++---- sway-core/src/query_engine/mod.rs | 251 ++++++++++++++++-- sway-core/src/semantic_analysis/module.rs | 114 +++++++- .../semantic_analysis/node_dependencies.rs | 2 +- sway-core/src/semantic_analysis/program.rs | 11 +- sway-core/src/type_system/engine.rs | 25 +- sway-lsp/src/capabilities/diagnostic.rs | 1 - sway-lsp/src/core/session.rs | 25 +- sway-lsp/src/server_state.rs | 136 +++++++--- sway-lsp/tests/integration/lsp.rs | 45 ++++ sway-lsp/tests/lib.rs | 76 +++--- sway-parse/src/module.rs | 2 - 17 files changed, 752 insertions(+), 184 deletions(-) diff --git a/sway-core/src/decl_engine/engine.rs b/sway-core/src/decl_engine/engine.rs index 6dca672b734..65033e65f9e 100644 --- a/sway-core/src/decl_engine/engine.rs +++ b/sway-core/src/decl_engine/engine.rs @@ -5,7 +5,7 @@ use std::{ sync::Arc, }; -use sway_types::{Named, ProgramId, Spanned}; +use sway_types::{Named, ProgramId, SourceId, Spanned}; use crate::{ concurrent_slab::ConcurrentSlab, @@ -459,6 +459,53 @@ decl_engine_clear_program!( type_alias_slab, ty::TyTypeAliasDecl; ); +macro_rules! decl_engine_clear_module { + ($($slab:ident, $decl:ty);* $(;)?) 
=> { + impl DeclEngine { + pub fn clear_module(&mut self, source_id: &SourceId) { + self.parents.write().retain(|key, _| { + match key { + AssociatedItemDeclId::TraitFn(decl_id) => { + self.get_trait_fn(decl_id).span().source_id().map_or(true, |src_id| src_id != source_id) + }, + AssociatedItemDeclId::Function(decl_id) => { + self.get_function(decl_id).span().source_id().map_or(true, |src_id| src_id != source_id) + }, + AssociatedItemDeclId::Type(decl_id) => { + self.get_type(decl_id).span().source_id().map_or(true, |src_id| src_id != source_id) + }, + AssociatedItemDeclId::Constant(decl_id) => { + self.get_constant(decl_id).span().source_id().map_or(true, |src_id| src_id != source_id) + }, + } + }); + + $( + self.$slab.retain(|_k, ty| match ty.span().source_id() { + Some(src_id) => src_id != source_id, + None => true, + }); + )* + } + } + }; +} + +decl_engine_clear_module!( + function_slab, ty::TyFunctionDecl; + trait_slab, ty::TyTraitDecl; + trait_fn_slab, ty::TyTraitFn; + trait_type_slab, ty::TyTraitType; + impl_self_or_trait_slab, ty::TyImplTrait; + struct_slab, ty::TyStructDecl; + storage_slab, ty::TyStorageDecl; + abi_slab, ty::TyAbiDecl; + constant_slab, ty::TyConstantDecl; + configurable_slab, ty::TyConfigurableDecl; + enum_slab, ty::TyEnumDecl; + type_alias_slab, ty::TyTypeAliasDecl; +); + impl DeclEngine { /// Given a [DeclRef] `index`, finds all the parents of `index` and all the /// recursive parents of those parents, and so on. Does not perform diff --git a/sway-core/src/decl_engine/parsed_engine.rs b/sway-core/src/decl_engine/parsed_engine.rs index 6977a86e7f6..2177131ceb4 100644 --- a/sway-core/src/decl_engine/parsed_engine.rs +++ b/sway-core/src/decl_engine/parsed_engine.rs @@ -9,7 +9,7 @@ use crate::{ }; use std::sync::Arc; -use sway_types::{ProgramId, Spanned}; +use sway_types::{ProgramId, SourceId, Spanned}; use super::parsed_id::ParsedDeclId; @@ -191,6 +191,46 @@ decl_engine_clear_program!( .span()), ); +macro_rules! 
decl_engine_clear_module { + ($(($slab:ident, $getter:expr)),* $(,)?) => { + impl ParsedDeclEngine { + pub fn clear_module(&mut self, program_id: &SourceId) { + $( + self.$slab.retain(|_k, item| { + #[allow(clippy::redundant_closure_call)] + let span = $getter(item); + match span.source_id() { + Some(src_id) => src_id != program_id, + None => true, + } + }); + )* + } + } + }; +} + +decl_engine_clear_module!( + (variable_slab, |item: &VariableDeclaration| item.name.span()), + (function_slab, |item: &FunctionDeclaration| item.name.span()), + (trait_slab, |item: &TraitDeclaration| item.name.span()), + (trait_fn_slab, |item: &TraitFn| item.name.span()), + (trait_type_slab, |item: &TraitTypeDeclaration| item + .name + .span()), + (impl_self_or_trait_slab, |item: &ImplSelfOrTrait| item + .block_span + .clone()), + (struct_slab, |item: &StructDeclaration| item.name.span()), + (storage_slab, |item: &StorageDeclaration| item.span.clone()), + (abi_slab, |item: &AbiDeclaration| item.name.span()), + (constant_slab, |item: &ConstantDeclaration| item.name.span()), + (enum_slab, |item: &EnumDeclaration| item.name.span()), + (type_alias_slab, |item: &TypeAliasDeclaration| item + .name + .span()), +); + impl ParsedDeclEngine { /// Friendly helper method for calling the `get` method from the /// implementation of [ParsedDeclEngineGet] for [ParsedDeclEngine] diff --git a/sway-core/src/engine_threading.rs b/sway-core/src/engine_threading.rs index c698d9700f4..e0e151249eb 100644 --- a/sway-core/src/engine_threading.rs +++ b/sway-core/src/engine_threading.rs @@ -48,6 +48,14 @@ impl Engines { self.parsed_decl_engine.clear_program(program_id); } + /// Removes all data associated with `source_id` from the declaration and type engines. + /// It is intended to be used during garbage collection to remove any data that is no longer needed. 
+ pub fn clear_module(&mut self, source_id: &sway_types::SourceId) { + self.type_engine.clear_module(source_id); + self.decl_engine.clear_module(source_id); + self.parsed_decl_engine.clear_module(source_id); + } + /// Helps out some `thing: T` by adding `self` as context. pub fn help_out(&self, thing: T) -> WithEngines<'_, T> { WithEngines { diff --git a/sway-core/src/language/ty/module.rs b/sway-core/src/language/ty/module.rs index ef86db90cd3..740c7e14588 100644 --- a/sway-core/src/language/ty/module.rs +++ b/sway-core/src/language/ty/module.rs @@ -75,7 +75,7 @@ impl TyModule { #[derive(Clone, Debug)] pub struct TySubmodule { - pub module: TyModule, + pub module: Arc, pub mod_name_span: Span, } diff --git a/sway-core/src/language/ty/program.rs b/sway-core/src/language/ty/program.rs index 92d273f1526..b2257451574 100644 --- a/sway-core/src/language/ty/program.rs +++ b/sway-core/src/language/ty/program.rs @@ -497,7 +497,7 @@ impl CollectTypesMetadata for TyProgram { for module in std::iter::once(&self.root).chain( self.root .submodules_recursive() - .map(|(_, submod)| &submod.module), + .map(|(_, submod)| &*submod.module), ) { for node in module.all_nodes.iter() { let is_generic_function = node.is_generic_function(decl_engine); @@ -526,7 +526,7 @@ impl CollectTypesMetadata for TyProgram { for module in std::iter::once(&self.root).chain( self.root .submodules_recursive() - .map(|(_, submod)| &submod.module), + .map(|(_, submod)| &*submod.module), ) { for node in module.all_nodes.iter() { if node.is_test_function(decl_engine) { diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index bdc7e683ceb..0046e2c9624 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -33,7 +33,7 @@ use control_flow_analysis::ControlFlowGraph; pub use debug_generation::write_dwarf; use indexmap::IndexMap; use metadata::MetadataManager; -use query_engine::{ModuleCacheKey, ModulePath, ProgramsCacheEntry}; +use query_engine::{ModuleCacheKey, ModuleCommonInfo, ParsedModuleInfo, 
ProgramsCacheEntry}; use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; use std::path::{Path, PathBuf}; @@ -236,7 +236,7 @@ fn parse_in_memory( pub struct Submodule { name: Ident, - path: ModulePath, + path: Arc, lexed: lexed::LexedSubmodule, parsed: parsed::ParseSubmodule, } @@ -259,7 +259,6 @@ fn parse_submodules( ) -> Submodules { // Assume the happy path, so there'll be as many submodules as dependencies, but no more. let mut submods = Vec::with_capacity(module.submodules().count()); - module.submodules().for_each(|submod| { // Read the source code from the dependency. // If we cannot, record as an error, but continue with other files. @@ -275,7 +274,6 @@ fn parse_submodules( return; } }; - if let Ok(ParsedModuleTree { tree_type: kind, lexed_module, @@ -318,7 +316,6 @@ fn parse_submodules( submods.push(submodule); } }); - submods } @@ -411,15 +408,19 @@ fn parse_module_tree( let version = lsp_mode .and_then(|lsp| lsp.file_versions.get(path.as_ref()).copied()) .unwrap_or(None); - let cache_entry = ModuleCacheEntry { - path, - modified_time, - hash, - dependencies, + + let common_info = ModuleCommonInfo { + path: path.clone(), include_tests, + dependencies, + hash, + }; + let parsed_info = ParsedModuleInfo { + modified_time, version, }; - query_engine.insert_parse_module_cache_entry(cache_entry); + let cache_entry = ModuleCacheEntry::new(common_info, parsed_info); + query_engine.update_or_insert_parsed_module_cache_entry(cache_entry); Ok(ParsedModuleTree { tree_type: kind, @@ -428,59 +429,89 @@ fn parse_module_tree( }) } -fn is_parse_module_cache_up_to_date( +/// Checks if the typed module cache for a given path is up to date. +/// +/// This function determines whether the cached typed representation of a module +/// is still valid based on file versions and dependencies. +/// +/// Note: This functionality is currently only supported when the compiler is +/// initiated from the language server. 
+pub(crate) fn is_ty_module_cache_up_to_date( engines: &Engines, path: &Arc, include_tests: bool, build_config: Option<&BuildConfig>, ) -> bool { - let query_engine = engines.qe(); + let cache = engines.qe().module_cache.read(); let key = ModuleCacheKey::new(path.clone(), include_tests); - let entry = query_engine.get_parse_module_cache_entry(&key); - match entry { - Some(entry) => { - // Let's check if we can re-use the dependency information - // we got from the cache. + cache.get(&key).map_or(false, |entry| { + entry.typed.as_ref().map_or(false, |typed| { + // Check if the cache is up to date based on file versions let cache_up_to_date = build_config - .as_ref() .and_then(|x| x.lsp_mode.as_ref()) - .and_then(|lsp| { - // First try to get the file version from lsp if it exists - lsp.file_versions.get(path.as_ref()) - }) - .map_or_else( - || { - // Otherwise we can safely read the file from disk here, as the LSP has not modified it, or we are not in LSP mode. - // Check if the file has been modified or if its hash is the same as the last compilation - let modified_time = std::fs::metadata(path.as_path()) - .ok() - .and_then(|m| m.modified().ok()); - entry.modified_time == modified_time || { - let src = std::fs::read_to_string(path.as_path()).unwrap(); - let mut hasher = DefaultHasher::new(); - src.hash(&mut hasher); - let hash = hasher.finish(); - hash == entry.hash - } - }, - |version| { - // The cache is invalid if the lsp version is greater than the last compilation - !version.map_or(false, |v| v > entry.version.unwrap_or(0)) - }, - ); - - // Look at the dependencies recursively to make sure they have not been - // modified either. 
- if cache_up_to_date { - entry.dependencies.iter().all(|path| { - is_parse_module_cache_up_to_date(engines, path, include_tests, build_config) + .and_then(|lsp| lsp.file_versions.get(path.as_ref())) + .map_or(true, |version| { + version.map_or(true, |v| typed.version.map_or(false, |tv| v <= tv)) + }); + + // If the cache is up to date, recursively check all dependencies + cache_up_to_date + && entry.common.dependencies.iter().all(|dep_path| { + is_ty_module_cache_up_to_date(engines, dep_path, include_tests, build_config) }) - } else { - false - } - } - None => false, - } + }) + }) +} + +/// Checks if the parsed module cache for a given path is up to date. +/// +/// This function determines whether the cached parsed representation of a module +/// is still valid based on file versions, modification times, or content hashes. +pub(crate) fn is_parse_module_cache_up_to_date( + engines: &Engines, + path: &Arc, + include_tests: bool, + build_config: Option<&BuildConfig>, +) -> bool { + let cache = engines.qe().module_cache.read(); + let key = ModuleCacheKey::new(path.clone(), include_tests); + cache.get(&key).map_or(false, |entry| { + // Determine if the cached dependency information is still valid + let cache_up_to_date = build_config + .and_then(|x| x.lsp_mode.as_ref()) + .and_then(|lsp| lsp.file_versions.get(path.as_ref())) + .map_or_else( + || { + // If LSP mode is not active or file version is unavailable, fall back to filesystem checks. 
+ let modified_time = std::fs::metadata(path.as_path()) + .ok() + .and_then(|m| m.modified().ok()); + // Check if modification time matches, or if not, compare file content hash + entry.parsed.modified_time == modified_time || { + let src = std::fs::read_to_string(path.as_path()).unwrap(); + let mut hasher = DefaultHasher::new(); + src.hash(&mut hasher); + hasher.finish() == entry.common.hash + } + }, + |version| { + // Determine if the parse cache is up-to-date in LSP mode: + // - If there's no LSP file version (version is None), consider the cache up-to-date. + // - If there is an LSP file version: + // - If there's no cached version (entry.parsed.version is None), the cache is outdated. + // - If there's a cached version, compare them: cache is up-to-date if the LSP file version + // is not greater than the cached version. + version.map_or(true, |v| entry.parsed.version.map_or(false, |ev| v <= ev)) + }, + ); + + // If the cache is up to date, recursively check all dependencies to make sure they have not been + // modified either. + cache_up_to_date + && entry.common.dependencies.iter().all(|dep_path| { + is_parse_module_cache_up_to_date(engines, dep_path, include_tests, build_config) + }) + }) } fn module_path( 
if is_parse_module_cache_up_to_date(engines, &path, include_tests, build_config) { let mut entry = query_engine.get_programs_cache_entry(&path).unwrap(); diff --git a/sway-core/src/query_engine/mod.rs b/sway-core/src/query_engine/mod.rs index 8f7e248291f..c05efb384f2 100644 --- a/sway-core/src/query_engine/mod.rs +++ b/sway-core/src/query_engine/mod.rs @@ -1,14 +1,19 @@ -use parking_lot::RwLock; -use std::{collections::HashMap, path::PathBuf, sync::Arc, time::SystemTime}; -use sway_error::error::CompileError; -use sway_error::warning::CompileWarning; +use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard}; +use std::{ + collections::HashMap, + ops::{Deref, DerefMut}, + path::PathBuf, + sync::Arc, + time::SystemTime, +}; +use sway_error::{error::CompileError, warning::CompileWarning}; use sway_types::IdentUnique; -use crate::decl_engine::{DeclId, DeclRef}; -use crate::language::ty::{TyFunctionDecl, TyFunctionSig}; -use crate::{Engines, Programs}; - -pub type ModulePath = Arc; +use crate::{ + decl_engine::{DeclId, DeclRef}, + language::ty::{TyFunctionDecl, TyFunctionSig, TyModule}, + {Engines, Programs}, +}; #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct ModuleCacheKey { @@ -26,16 +31,100 @@ impl ModuleCacheKey { } #[derive(Clone, Debug)] -pub struct ModuleCacheEntry { - pub path: ModulePath, - pub modified_time: Option, +pub struct ModuleCommonInfo { + pub path: Arc, pub hash: u64, - pub dependencies: Vec, pub include_tests: bool, + pub dependencies: Vec>, +} + +#[derive(Clone, Debug)] +pub struct ParsedModuleInfo { + pub modified_time: Option, pub version: Option, } -pub type ModuleCacheMap = HashMap; +#[derive(Clone, Debug)] +pub struct TypedModuleInfo { + pub module: Arc, + pub version: Option, +} + +#[derive(Clone, Debug)] +pub struct ModuleCacheEntry { + pub common: ModuleCommonInfo, + pub parsed: ParsedModuleInfo, + pub typed: Option, +} + +impl ModuleCacheEntry { + pub fn new(common: ModuleCommonInfo, parsed: ParsedModuleInfo) -> Self { + Self 
{ + common, + parsed, + typed: None, + } + } + + pub fn is_typed(&self) -> bool { + self.typed.is_some() + } + + pub fn set_typed(&mut self, typed: TypedModuleInfo) { + self.typed = Some(typed); + } + + pub fn update_common(&mut self, new_common: ModuleCommonInfo) { + self.common = new_common; + } + + pub fn update_parsed(&mut self, new_parsed: ParsedModuleInfo) { + self.parsed = new_parsed; + } + + pub fn update_parsed_and_common( + &mut self, + new_common: ModuleCommonInfo, + new_parsed: ParsedModuleInfo, + ) { + self.common = new_common; + self.parsed = new_parsed; + } +} + +#[derive(Debug, Default, Clone)] +pub struct ModuleCacheMap(HashMap); + +impl Deref for ModuleCacheMap { + type Target = HashMap; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for ModuleCacheMap { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl ModuleCacheMap { + pub fn update_entry( + &mut self, + key: &ModuleCacheKey, + new_common: ModuleCommonInfo, + new_parsed: ParsedModuleInfo, + ) { + if let Some(entry) = self.get_mut(key) { + entry.update_parsed_and_common(new_common, new_parsed); + } else { + self.insert(key.clone(), ModuleCacheEntry::new(new_common, new_parsed)); + } + } +} + +pub type ProgramsCacheMap = HashMap, ProgramsCacheEntry>; +pub type FunctionsCacheMap = HashMap<(IdentUnique, String), FunctionCacheEntry>; #[derive(Clone, Debug)] pub struct ProgramsCacheEntry { @@ -44,35 +133,41 @@ pub struct ProgramsCacheEntry { pub handler_data: (Vec, Vec), } -pub type ProgramsCacheMap = HashMap, ProgramsCacheEntry>; - #[derive(Clone, Debug)] pub struct FunctionCacheEntry { pub fn_decl: DeclRef>, } -pub type FunctionsCacheMap = HashMap<(IdentUnique, String), FunctionCacheEntry>; - -#[derive(Debug, Default, Clone)] +#[derive(Debug, Default)] pub struct QueryEngine { // We want the below types wrapped in Arcs to optimize cloning from LSP. 
- parse_module_cache: Arc>, programs_cache: Arc>, function_cache: Arc>, + pub module_cache: CowCache, } -impl QueryEngine { - pub fn get_parse_module_cache_entry(&self, path: &ModuleCacheKey) -> Option { - let cache = self.parse_module_cache.read(); - cache.get(path).cloned() +impl Clone for QueryEngine { + fn clone(&self) -> Self { + Self { + programs_cache: self.programs_cache.clone(), + function_cache: self.function_cache.clone(), + module_cache: CowCache::new(self.module_cache.read().clone()), + } } +} - pub fn insert_parse_module_cache_entry(&self, entry: ModuleCacheEntry) { - let path = entry.path.clone(); - let include_tests = entry.include_tests; +impl QueryEngine { + pub fn update_or_insert_parsed_module_cache_entry(&self, entry: ModuleCacheEntry) { + let path = entry.common.path.clone(); + let include_tests = entry.common.include_tests; let key = ModuleCacheKey::new(path, include_tests); - let mut cache = self.parse_module_cache.write(); - cache.insert(key, entry); + let mut cache = self.module_cache.write(); + cache.update_entry(&key, entry.common, entry.parsed); + } + + pub fn update_typed_module_cache_entry(&self, key: &ModuleCacheKey, entry: TypedModuleInfo) { + let mut cache = self.module_cache.write(); + cache.get_mut(key).unwrap().set_typed(entry); } pub fn get_programs_cache_entry(&self, path: &Arc) -> Option { @@ -111,3 +206,101 @@ impl QueryEngine { ); } } + +/// Thread-safe, copy-on-write cache optimized for LSP operations. +/// +/// Addresses key LSP challenges: +/// 1. Concurrent read access to shared data +/// 2. Local modifications for cancellable operations (e.g., compilation) +/// 3. Prevents incomplete results from affecting shared state +/// 4. Maintains consistency via explicit commit step +/// +/// Uses `Arc>` for shared state and `RwLock>` for local changes. +/// Suitable for interactive sessions with frequent file changes. 
+#[derive(Debug, Default)] +pub struct CowCache { + inner: Arc>, + local: RwLock>, +} + +impl CowCache { + /// Creates a new `CowCache` with the given initial value. + /// + /// The value is wrapped in an `Arc>` to allow shared access across threads. + pub fn new(value: T) -> Self { + Self { + inner: Arc::new(RwLock::new(value)), + local: RwLock::new(None), + } + } + + /// Provides read access to the cached value. + /// + /// If a local modification exists, it returns a reference to the local copy. + /// Otherwise, it returns a reference to the shared state. + /// + /// This method is optimized for concurrent read access in LSP operations. + pub fn read(&self) -> impl Deref + '_ { + if self.local.read().is_some() { + ReadGuard::Local(self.local.read()) + } else { + ReadGuard::Shared(self.inner.read()) + } + } + + /// Provides write access to a local copy of the cached value. + /// + /// In LSP, this is used for operations like compilation tasks that may be cancelled. + /// It allows modifications without affecting the shared state until explicitly committed. + pub fn write(&self) -> impl DerefMut + '_ { + let mut local = self.local.write(); + if local.is_none() { + *local = Some(self.inner.read().clone()); + } + WriteGuard(local) + } + + /// Commits local modifications to the shared state. + /// + /// Called after successful completion of a compilation task. + /// If a task is cancelled, not calling this method effectively discards local changes. + pub fn commit(&self) { + if let Some(local) = self.local.write().take() { + *self.inner.write() = local; + } + } +} + +/// A guard type that provides read access to either the local or shared state. 
+enum ReadGuard<'a, T: Clone> { + Local(RwLockReadGuard<'a, Option>), + Shared(RwLockReadGuard<'a, T>), +} + +impl<'a, T: Clone> Deref for ReadGuard<'a, T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + match self { + ReadGuard::Local(r) => r.as_ref().unwrap(), + ReadGuard::Shared(guard) => guard.deref(), + } + } +} + +/// A guard type that provides write access to the local state. +struct WriteGuard<'a, T: Clone>(RwLockWriteGuard<'a, Option>); + +impl<'a, T: Clone> Deref for WriteGuard<'a, T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + self.0.as_ref().unwrap() + } +} + +impl<'a, T: Clone> DerefMut for WriteGuard<'a, T> { + fn deref_mut(&mut self) -> &mut Self::Target { + self.0.as_mut().unwrap() + } +} diff --git a/sway-core/src/semantic_analysis/module.rs b/sway-core/src/semantic_analysis/module.rs index e56e1847d56..b52f172bfed 100644 --- a/sway-core/src/semantic_analysis/module.rs +++ b/sway-core/src/semantic_analysis/module.rs @@ -2,6 +2,7 @@ use std::{ collections::{HashMap, HashSet}, fmt::Display, fs, + sync::Arc, }; use graph_cycles::Cycles; @@ -9,18 +10,20 @@ use sway_error::{ error::CompileError, handler::{ErrorEmitted, Handler}, }; -use sway_types::{BaseIdent, Named}; +use sway_types::{BaseIdent, Named, SourceId}; use crate::{ decl_engine::{DeclEngineGet, DeclId}, engine_threading::{DebugWithEngines, PartialEqWithEngines, PartialEqWithEnginesContext}, + is_ty_module_cache_up_to_date, language::{ parsed::*, ty::{self, TyAstNodeContent, TyDecl}, CallPath, ModName, }, + query_engine::{ModuleCacheKey, TypedModuleInfo}, semantic_analysis::*, - Engines, TypeInfo, + BuildConfig, Engines, TypeInfo, }; use super::{ @@ -254,6 +257,42 @@ impl ty::TyModule { Ok(()) } + /// Retrieves a cached typed module if it's up to date. + /// + /// This function checks the cache for a typed module corresponding to the given source ID. + /// If found and up to date, it returns the cached module. Otherwise, it returns None. 
+ fn get_cached_ty_module_if_up_to_date( + source_id: Option<&SourceId>, + engines: &Engines, + build_config: Option<&BuildConfig>, + ) -> Option> { + let source_id = source_id?; + + // Create a cache key and get the module cache + let path = engines.se().get_path(source_id); + let include_tests = build_config.map_or(false, |x| x.include_tests); + let key = ModuleCacheKey::new(path.clone().into(), include_tests); + let cache = engines.qe().module_cache.read(); + cache.get(&key).and_then(|entry| { + entry.typed.as_ref().and_then(|typed| { + // Check if the cached module is up to date + let is_up_to_date = is_ty_module_cache_up_to_date( + engines, + &path.into(), + include_tests, + build_config, + ); + + // Return the cached module if it's up to date, otherwise None + if is_up_to_date { + Some(typed.module.clone()) + } else { + None + } + }) + }) + } + /// Type-check the given parsed module to produce a typed module. /// /// Recursively type-checks submodules first. @@ -263,7 +302,8 @@ impl ty::TyModule { engines: &Engines, kind: TreeType, parsed: &ParseModule, - ) -> Result { + build_config: Option<&BuildConfig>, + ) -> Result, ErrorEmitted> { let ParseModule { submodules, tree, @@ -273,25 +313,51 @@ impl ty::TyModule { .. } = parsed; + // Try to get the cached root module if it's up to date + if let Some(module) = ty::TyModule::get_cached_ty_module_if_up_to_date( + parsed.span.source_id(), + engines, + build_config, + ) { + return Ok(module); + } + // Type-check submodules first in order of evaluation previously computed by the dependency graph. 
let submodules_res = module_eval_order .iter() .map(|eval_mod_name| { let (name, submodule) = submodules .iter() - .find(|(submod_name, _submodule)| eval_mod_name == submod_name) + .find(|(submod_name, _)| eval_mod_name == submod_name) .unwrap(); - Ok(( - name.clone(), - ty::TySubmodule::type_check( + + // Try to get the cached submodule + if let Some(cached_module) = ty::TyModule::get_cached_ty_module_if_up_to_date( + submodule.module.span.source_id(), + engines, + build_config, + ) { + // If cached, create TySubmodule from cached module + Ok::<(BaseIdent, ty::TySubmodule), ErrorEmitted>(( + name.clone(), + ty::TySubmodule { + module: cached_module, + mod_name_span: submodule.mod_name_span.clone(), + }, + )) + } else { + // If not cached, type-check the submodule + let type_checked_submodule = ty::TySubmodule::type_check( handler, ctx.by_ref(), engines, name.clone(), kind, submodule, - )?, - )) + build_config, + )?; + Ok((name.clone(), type_checked_submodule)) + } }) .collect::, _>>(); @@ -303,7 +369,6 @@ impl ty::TyModule { )?; let mut all_nodes = Self::type_check_nodes(handler, ctx.by_ref(), &ordered_nodes)?; - let submodules = submodules_res?; let fallback_fn = collect_fallback_fn(&all_nodes, engines, handler)?; @@ -394,13 +459,34 @@ impl ty::TyModule { } } - Ok(Self { + let ty_module = Arc::new(Self { span: span.clone(), submodules, namespace: ctx.namespace.clone(), all_nodes, attributes: attributes.clone(), - }) + }); + + // Cache the ty module + if let Some(source_id) = span.source_id() { + let path = engines.se().get_path(source_id); + let version = build_config + .and_then(|config| config.lsp_mode.as_ref()) + .and_then(|lsp| lsp.file_versions.get(&path).copied()) + .flatten(); + + let include_tests = build_config.map_or(false, |x| x.include_tests); + let key = ModuleCacheKey::new(path.clone().into(), include_tests); + engines.qe().update_typed_module_cache_entry( + &key, + TypedModuleInfo { + module: ty_module.clone(), + version, + }, + ); + } + + 
Ok(ty_module) } // Filter and gather impl items @@ -615,6 +701,7 @@ impl ty::TySubmodule { mod_name: ModName, kind: TreeType, submodule: &ParseSubmodule, + build_config: Option<&BuildConfig>, ) -> Result { let ParseSubmodule { module, @@ -622,7 +709,8 @@ impl ty::TySubmodule { visibility, } = submodule; parent_ctx.enter_submodule(mod_name, *visibility, module.span.clone(), |submod_ctx| { - let module_res = ty::TyModule::type_check(handler, submod_ctx, engines, kind, module); + let module_res = + ty::TyModule::type_check(handler, submod_ctx, engines, kind, module, build_config); module_res.map(|module| ty::TySubmodule { module, mod_name_span: mod_name_span.clone(), diff --git a/sway-core/src/semantic_analysis/node_dependencies.rs b/sway-core/src/semantic_analysis/node_dependencies.rs index 6a577d7caae..398c590566a 100644 --- a/sway-core/src/semantic_analysis/node_dependencies.rs +++ b/sway-core/src/semantic_analysis/node_dependencies.rs @@ -42,7 +42,7 @@ pub(crate) fn order_ast_nodes_by_dependency( Ok(()) })?; - // Reorder the parsed AstNodes based on dependency. Includes first, then uses, then + // Reorder the parsed AstNodes based on dependency. Includes first, then uses, then // reordered declarations, then anything else. To keep the list stable and simple we can // use a basic insertion sort. 
Ok(nodes diff --git a/sway-core/src/semantic_analysis/program.rs b/sway-core/src/semantic_analysis/program.rs index ab83749d2f5..c70a442fc1a 100644 --- a/sway-core/src/semantic_analysis/program.rs +++ b/sway-core/src/semantic_analysis/program.rs @@ -60,7 +60,14 @@ impl TyProgram { let ParseProgram { root, kind } = parsed; - let root = ty::TyModule::type_check(handler, ctx.by_ref(), engines, parsed.kind, root)?; + let root = ty::TyModule::type_check( + handler, + ctx.by_ref(), + engines, + parsed.kind, + root, + build_config, + )?; let (kind, declarations, configurables) = Self::validate_root( handler, @@ -73,7 +80,7 @@ impl TyProgram { let program = TyProgram { kind, - root, + root: (*root).clone(), declarations, configurables, storage_slots: vec![], diff --git a/sway-core/src/type_system/engine.rs b/sway-core/src/type_system/engine.rs index e3b945e1484..206bed81bb2 100644 --- a/sway-core/src/type_system/engine.rs +++ b/sway-core/src/type_system/engine.rs @@ -67,16 +67,25 @@ impl TypeEngine { } } + fn clear_items(&mut self, keep: F) + where + F: Fn(&SourceId) -> bool, + { + self.slab + .retain(|_, tsi| tsi.source_id.as_ref().map_or(true, &keep)); + self.id_map + .write() + .retain(|tsi, _| tsi.source_id.as_ref().map_or(true, &keep)); + } + /// Removes all data associated with `program_id` from the type engine. pub fn clear_program(&mut self, program_id: &ProgramId) { - self.slab.retain(|_, tsi| match tsi.source_id { - Some(source_id) => &source_id.program_id() != program_id, - None => true, - }); - self.id_map.write().retain(|tsi, _| match tsi.source_id { - Some(source_id) => &source_id.program_id() != program_id, - None => true, - }); + self.clear_items(|id| id.program_id() != *program_id); + } + + /// Removes all data associated with `source_id` from the type engine. 
+ pub fn clear_module(&mut self, source_id: &SourceId) { + self.clear_items(|id| id != source_id); } pub fn replace(&self, id: TypeId, new_value: TypeSourceInfo) { diff --git a/sway-lsp/src/capabilities/diagnostic.rs b/sway-lsp/src/capabilities/diagnostic.rs index d5121d21914..5bdeee39fe8 100644 --- a/sway-lsp/src/capabilities/diagnostic.rs +++ b/sway-lsp/src/capabilities/diagnostic.rs @@ -61,7 +61,6 @@ pub fn get_diagnostics( diagnostics.entry(path).or_default().errors.push(diagnostic); } } - diagnostics } diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index 6dab2952254..edcda82ad8c 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -124,7 +124,10 @@ impl Session { } /// Clean up memory in the [TypeEngine] and [DeclEngine] for the user's workspace. - pub fn garbage_collect(&self, engines: &mut Engines) -> Result<(), LanguageServerError> { + pub fn garbage_collect_program( + &self, + engines: &mut Engines, + ) -> Result<(), LanguageServerError> { let _p = tracing::trace_span!("garbage_collect").entered(); let path = self.sync.temp_dir()?; let program_id = { engines.se().get_program_id(&path) }; @@ -134,6 +137,18 @@ impl Session { Ok(()) } + /// Clean up memory in the [TypeEngine] and [DeclEngine] for the modified file. 
+ pub fn garbage_collect_module( + &self, + engines: &mut Engines, + uri: &Url, + ) -> Result<(), LanguageServerError> { + let path = uri.to_file_path().unwrap(); + let source_id = { engines.se().get_source_id(&path) }; + engines.clear_module(&source_id); + Ok(()) + } + pub fn token_ranges(&self, url: &Url, position: Position) -> Option> { let _p = tracing::trace_span!("token_ranges").entered(); let mut token_ranges: Vec<_> = self @@ -264,13 +279,12 @@ pub fn compile( experimental: sway_core::ExperimentalFlags, ) -> Result, Handler)>, LanguageServerError> { let _p = tracing::trace_span!("compile").entered(); - let tests_enabled = true; pkg::check( build_plan, BuildTarget::default(), true, lsp_mode, - tests_enabled, + true, engines, retrigger_compilation, experimental, @@ -387,6 +401,7 @@ pub fn parse_project( let build_plan = session .build_plan_cache .get_or_update(&session.sync.manifest_path(), || build_plan(uri))?; + let results = compile( &build_plan, engines, @@ -397,6 +412,7 @@ pub fn parse_project( if results.last().is_none() { return Err(LanguageServerError::ProgramsIsNone); } + let diagnostics = traverse(results, engines, session.clone())?; if let Some(config) = &lsp_mode { // Only write the diagnostics results on didSave or didOpen. @@ -407,6 +423,7 @@ pub fn parse_project( } } } + if let Some(typed) = &session.compiled_program.read().typed { session.runnables.clear(); create_runnables(&session.runnables, typed, engines.de(), engines.se()); @@ -464,6 +481,7 @@ fn create_runnables( ) { let _p = tracing::trace_span!("create_runnables").entered(); // Insert runnable test functions. + for (decl, _) in typed_program.test_fns(decl_engine) { // Get the span of the first attribute if it exists, otherwise use the span of the function name. let span = decl @@ -480,7 +498,6 @@ fn create_runnables( runnables.entry(path).or_default().push(runnable); } } - // Insert runnable main function if the program is a script. 
if let ty::TyProgramKind::Script { entry_function: ref main_function, diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 3f7760b4544..b50f546cf97 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -41,7 +41,7 @@ pub struct ServerState { /// A Least Recently Used (LRU) cache of [Session]s, each representing a project opened in the user's workspace. /// This cache limits memory usage by maintaining a fixed number of active sessions, automatically /// evicting the least recently used sessions when the capacity is reached. - pub(crate) sessions: LruSessionCache, + pub sessions: LruSessionCache, pub documents: Documents, // Compilation thread related fields pub(crate) retrigger_compilation: Arc, @@ -142,7 +142,9 @@ impl ServerState { { // Call this on the engines clone so we don't clear types that are still in use // and might be needed in the case cancel compilation was triggered. - if let Err(err) = session.garbage_collect(&mut engines_clone) { + if let Err(err) = + session.garbage_collect_module(&mut engines_clone, &uri) + { tracing::error!( "Unable to perform garbage collection: {}", err.to_string() @@ -155,7 +157,6 @@ impl ServerState { optimized_build: ctx.optimized_build, file_versions: ctx.file_versions, }); - // Set the is_compiling flag to true so that the wait_for_parsing function knows that we are compiling is_compiling.store(true, Ordering::SeqCst); match session::parse_project( @@ -177,6 +178,10 @@ impl ServerState { // Because the engines_clone has garbage collection applied. If the workspace AST was reused, we need to keep the old engines // as the engines_clone might have cleared some types that are still in use. if metrics.reused_programs == 0 { + // Commit local changes in the module cache to the shared state. + // This ensures that any modifications made during compilation are preserved + // before we swap the engines. 
+ engines_clone.qe().module_cache.commit(); // The compiler did not reuse the workspace AST. // We need to overwrite the old engines with the engines clone. mem::swap( @@ -334,16 +339,9 @@ impl ServerState { diagnostics_to_publish } - async fn init_session(&self, uri: &Url) -> Result<(), LanguageServerError> { - let session = Arc::new(Session::new()); - let project_name = session.init(uri, &self.documents).await?; - self.sessions.insert(project_name, session); - Ok(()) - } - /// Constructs and returns a tuple of `(Url, Arc)` from a given workspace URI. /// The returned URL represents the temp directory workspace. - pub(crate) async fn uri_and_session_from_workspace( + pub async fn uri_and_session_from_workspace( &self, workspace_uri: &Url, ) -> Result<(Url, Arc), LanguageServerError> { @@ -367,20 +365,23 @@ impl ServerState { .ok_or(DirectoryError::ManifestDirNotFound)? .to_path_buf(); - let session = self.sessions.get(&manifest_dir).unwrap_or({ - // If no session can be found, then we need to call init and insert a new session into the map - self.init_session(uri).await?; - self.sessions - .get(&manifest_dir) - .expect("no session found even though it was just inserted into the map") - }); + // If the session is already in the cache, return it + if let Some(session) = self.sessions.get(&manifest_dir) { + return Ok(session); + } + + // If no session can be found, then we need to call init and insert a new session into the map + let session = Arc::new(Session::new()); + session.init(uri, &self.documents).await?; + self.sessions.insert(manifest_dir.clone(), session.clone()); + Ok(session) } } /// A Least Recently Used (LRU) cache for storing and managing `Session` objects. /// This cache helps limit memory usage by maintaining a fixed number of active sessions. -pub(crate) struct LruSessionCache { +pub struct LruSessionCache { /// Stores the actual `Session` objects, keyed by their file paths. 
sessions: Arc>>, /// Keeps track of the order in which sessions were accessed, with most recent at the front. @@ -391,7 +392,7 @@ pub(crate) struct LruSessionCache { impl LruSessionCache { /// Creates a new `LruSessionCache` with the specified capacity. - pub(crate) fn new(capacity: usize) -> Self { + pub fn new(capacity: usize) -> Self { LruSessionCache { sessions: Arc::new(DashMap::new()), usage_order: Arc::new(Mutex::new(VecDeque::with_capacity(capacity))), @@ -399,12 +400,12 @@ impl LruSessionCache { } } - pub(crate) fn iter(&self) -> impl Iterator>> { + pub fn iter(&self) -> impl Iterator>> { self.sessions.iter() } /// Retrieves a session from the cache and updates its position to the front of the usage order. - pub(crate) fn get(&self, path: &PathBuf) -> Option> { + pub fn get(&self, path: &PathBuf) -> Option> { if let Some(session) = self.sessions.try_get(path).try_unwrap() { if self.sessions.len() >= self.capacity { self.move_to_front(path); @@ -418,16 +419,13 @@ impl LruSessionCache { /// Inserts or updates a session in the cache. /// If at capacity and inserting a new session, evicts the least recently used one. /// For existing sessions, updates their position in the usage order if at capacity. 
- pub(crate) fn insert(&self, path: PathBuf, session: Arc) { - if self.sessions.get(&path).is_some() { - tracing::trace!("Updating existing session for path: {:?}", path); - // Session already exists, just update its position in the usage order if at capacity - if self.sessions.len() >= self.capacity { - self.move_to_front(&path); - } + pub fn insert(&self, path: PathBuf, session: Arc) { + if let Some(mut entry) = self.sessions.get_mut(&path) { + // Session already exists, update it + *entry = session; + self.move_to_front(&path); } else { // New session - tracing::trace!("Inserting new session for path: {:?}", path); if self.sessions.len() >= self.capacity { self.evict_least_used(); } @@ -459,3 +457,81 @@ impl LruSessionCache { } } } + +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + use std::sync::Arc; + + #[test] + fn test_lru_session_cache_insertion_and_retrieval() { + let cache = LruSessionCache::new(2); + let path1 = PathBuf::from("/path/1"); + let path2 = PathBuf::from("/path/2"); + let session1 = Arc::new(Session::new()); + let session2 = Arc::new(Session::new()); + + cache.insert(path1.clone(), session1.clone()); + cache.insert(path2.clone(), session2.clone()); + + assert!(Arc::ptr_eq(&cache.get(&path1).unwrap(), &session1)); + assert!(Arc::ptr_eq(&cache.get(&path2).unwrap(), &session2)); + } + + #[test] + fn test_lru_session_cache_capacity() { + let cache = LruSessionCache::new(2); + let path1 = PathBuf::from("/path/1"); + let path2 = PathBuf::from("/path/2"); + let path3 = PathBuf::from("/path/3"); + let session1 = Arc::new(Session::new()); + let session2 = Arc::new(Session::new()); + let session3 = Arc::new(Session::new()); + + cache.insert(path1.clone(), session1); + cache.insert(path2.clone(), session2); + cache.insert(path3.clone(), session3); + + assert!(cache.get(&path1).is_none()); + assert!(cache.get(&path2).is_some()); + assert!(cache.get(&path3).is_some()); + } + + #[test] + fn test_lru_session_cache_update_order() { + let 
cache = LruSessionCache::new(2); + let path1 = PathBuf::from("/path/1"); + let path2 = PathBuf::from("/path/2"); + let path3 = PathBuf::from("/path/3"); + let session1 = Arc::new(Session::new()); + let session2 = Arc::new(Session::new()); + let session3 = Arc::new(Session::new()); + + cache.insert(path1.clone(), session1.clone()); + cache.insert(path2.clone(), session2.clone()); + + // Access path1 to move it to the front + cache.get(&path1); + + // Insert path3, which should evict path2 + cache.insert(path3.clone(), session3); + + assert!(cache.get(&path1).is_some()); + assert!(cache.get(&path2).is_none()); + assert!(cache.get(&path3).is_some()); + } + + #[test] + fn test_lru_session_cache_overwrite() { + let cache = LruSessionCache::new(2); + let path1 = PathBuf::from("/path/1"); + let session1 = Arc::new(Session::new()); + let session1_new = Arc::new(Session::new()); + + cache.insert(path1.clone(), session1); + cache.insert(path1.clone(), session1_new.clone()); + + assert!(Arc::ptr_eq(&cache.get(&path1).unwrap(), &session1_new)); + } +} diff --git a/sway-lsp/tests/integration/lsp.rs b/sway-lsp/tests/integration/lsp.rs index e0d063871be..e6ce422616f 100644 --- a/sway-lsp/tests/integration/lsp.rs +++ b/sway-lsp/tests/integration/lsp.rs @@ -123,6 +123,51 @@ pub(crate) async fn did_change_request( did_change } +/// Simulates a keypress at the current cursor position +/// 66% chance of enter keypress +/// 33% chance of backspace keypress +pub fn simulate_keypress( + uri: &Url, + version: i32, + cursor_line: &mut u32, +) -> DidChangeTextDocumentParams { + if rand::random::() % 3 < 2 { + // enter keypress at current cursor line + *cursor_line += 1; + create_did_change_params( + uri, + version, + Position { + line: *cursor_line - 1, + character: 0, + }, + Position { + line: *cursor_line - 1, + character: 0, + }, + 0, + ) + } else { + // backspace keypress at current cursor line + if *cursor_line > 1 { + *cursor_line -= 1; + } + create_did_change_params( + uri, + 
version, + Position { + line: *cursor_line, + character: 0, + }, + Position { + line: *cursor_line + 1, + character: 0, + }, + 1, + ) + } +} + pub(crate) async fn show_ast_request( server: &ServerState, uri: &Url, diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index e0baeb3984e..5c0d38a7f43 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -220,9 +220,12 @@ fn did_change_stress_test_random_wait() { .join("generics_in_contract"); let uri = init_and_open(&mut service, example_dir.join("src/main.sw")).await; let times = 60; + + // Initialize cursor position + let mut cursor_line = 29; for version in 0..times { - //eprintln!("version: {}", version); - let _ = lsp::did_change_request(&mut service, &uri, version + 1, None).await; + let params = lsp::simulate_keypress(&uri, version, &mut cursor_line); + let _ = lsp::did_change_request(&mut service, &uri, version, Some(params)).await; if version == 0 { service.inner().wait_for_parsing().await; } @@ -265,39 +268,13 @@ fn garbage_collection_runner(path: PathBuf) { .gc_frequency = 1; let uri = init_and_open(&mut service, path).await; let times = 60; + + // Initialize cursor position + let mut cursor_line = 20; + for version in 1..times { //eprintln!("version: {}", version); - let params = if rand::random::() % 3 < 1 { - // enter keypress at line 20 - lsp::create_did_change_params( - &uri, - version, - Position { - line: 20, - character: 0, - }, - Position { - line: 20, - character: 0, - }, - 0, - ) - } else { - // backspace keypress at line 21 - lsp::create_did_change_params( - &uri, - version, - Position { - line: 20, - character: 0, - }, - Position { - line: 21, - character: 0, - }, - 1, - ) - }; + let params = lsp::simulate_keypress(&uri, version, &mut cursor_line); let _ = lsp::did_change_request(&mut service, &uri, version, Some(params)).await; if version == 0 { service.inner().wait_for_parsing().await; @@ -2159,3 +2136,36 @@ async fn write_all_example_asts() { } let _ = 
server.shutdown_server(); } + +#[test] +fn test_url_to_session_existing_session() { + use std::sync::Arc; + run_async!({ + let (mut service, _) = LspService::new(ServerState::new); + let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await; + + // First call to uri_and_session_from_workspace + let (first_uri, first_session) = service + .inner() + .uri_and_session_from_workspace(&uri) + .await + .unwrap(); + + // Second call to uri_and_session_from_workspace + let (second_uri, second_session) = service + .inner() + .uri_and_session_from_workspace(&uri) + .await + .unwrap(); + + // Assert that the URIs are the same + assert_eq!(first_uri, second_uri, "URIs should be identical"); + + // Assert that the sessions are the same (they should point to the same Arc) + assert!( + Arc::ptr_eq(&first_session, &second_session), + "Sessions should be identical" + ); + shutdown_and_exit(&mut service).await; + }); +} diff --git a/sway-parse/src/module.rs b/sway-parse/src/module.rs index edd33c8bb9c..7936012e96a 100644 --- a/sway-parse/src/module.rs +++ b/sway-parse/src/module.rs @@ -60,9 +60,7 @@ impl ParseToEnd for Annotated { } } let (kind, semicolon_token) = parser.parse()?; - let (items, consumed) = parser.parse_to_end()?; - let module = Annotated { attribute_list, value: Module {