Merge bc06646 into 0245223
JoshuaBatty committed Aug 1, 2024
2 parents: 0245223 + bc06646, commit: ff415cc
Showing 17 changed files with 752 additions and 184 deletions.
49 changes: 48 additions & 1 deletion sway-core/src/decl_engine/engine.rs
@@ -5,7 +5,7 @@ use std::{
sync::Arc,
};

use sway_types::{Named, ProgramId, Spanned};
use sway_types::{Named, ProgramId, SourceId, Spanned};

use crate::{
concurrent_slab::ConcurrentSlab,
@@ -375,6 +375,53 @@ decl_engine_clear_program!(
type_alias_slab, ty::TyTypeAliasDecl;
);

macro_rules! decl_engine_clear_module {
($($slab:ident, $decl:ty);* $(;)?) => {
impl DeclEngine {
pub fn clear_module(&mut self, source_id: &SourceId) {
self.parents.write().retain(|key, _| {
match key {
AssociatedItemDeclId::TraitFn(decl_id) => {
self.get_trait_fn(decl_id).span().source_id().map_or(true, |src_id| src_id != source_id)
},
AssociatedItemDeclId::Function(decl_id) => {
self.get_function(decl_id).span().source_id().map_or(true, |src_id| src_id != source_id)
},
AssociatedItemDeclId::Type(decl_id) => {
self.get_type(decl_id).span().source_id().map_or(true, |src_id| src_id != source_id)
},
AssociatedItemDeclId::Constant(decl_id) => {
self.get_constant(decl_id).span().source_id().map_or(true, |src_id| src_id != source_id)
},
}
});

$(
self.$slab.retain(|_k, ty| match ty.span().source_id() {
Some(src_id) => src_id != source_id,
None => true,
});
)*
}
}
};
}

decl_engine_clear_module!(
function_slab, ty::TyFunctionDecl;
trait_slab, ty::TyTraitDecl;
trait_fn_slab, ty::TyTraitFn;
trait_type_slab, ty::TyTraitType;
impl_self_or_trait_slab, ty::TyImplTrait;
struct_slab, ty::TyStructDecl;
storage_slab, ty::TyStorageDecl;
abi_slab, ty::TyAbiDecl;
constant_slab, ty::TyConstantDecl;
configurable_slab, ty::TyConfigurableDecl;
enum_slab, ty::TyEnumDecl;
type_alias_slab, ty::TyTypeAliasDecl;
);

impl DeclEngine {
/// Given a [DeclRef] `index`, finds all the parents of `index` and all the
/// recursive parents of those parents, and so on. Does not perform
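The per-slab body that `decl_engine_clear_module!` generates boils down to a retain-by-source-file filter. A minimal, self-contained sketch of that pattern on a plain HashMap (the `SourceId` and `Decl` types below are stand-ins, not the sway_types ones):

    use std::collections::HashMap;

    // Stand-ins for the real engine types; only the filtering pattern matters here.
    #[derive(Clone, Copy, PartialEq, Eq)]
    struct SourceId(u32);

    struct Decl {
        source_id: Option<SourceId>,
    }

    // Mirrors the per-slab body generated above: keep a declaration unless its
    // span resolves to the module being cleared.
    fn clear_module(slab: &mut HashMap<usize, Decl>, source_id: &SourceId) {
        slab.retain(|_k, decl| decl.source_id.map_or(true, |src| src != *source_id));
    }

    fn main() {
        let mut slab = HashMap::new();
        slab.insert(0, Decl { source_id: Some(SourceId(1)) });
        slab.insert(1, Decl { source_id: Some(SourceId(2)) });
        slab.insert(2, Decl { source_id: None });
        clear_module(&mut slab, &SourceId(1));
        assert_eq!(slab.len(), 2); // only the declaration from source 1 was dropped
    }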
42 changes: 41 additions & 1 deletion sway-core/src/decl_engine/parsed_engine.rs
@@ -9,7 +9,7 @@ use crate::{
};

use std::sync::Arc;
use sway_types::{ProgramId, Spanned};
use sway_types::{ProgramId, SourceId, Spanned};

use super::parsed_id::ParsedDeclId;

@@ -167,6 +167,46 @@ decl_engine_clear_program!(
.span()),
);

macro_rules! decl_engine_clear_module {
($(($slab:ident, $getter:expr)),* $(,)?) => {
impl ParsedDeclEngine {
pub fn clear_module(&mut self, program_id: &SourceId) {
$(
self.$slab.retain(|_k, item| {
#[allow(clippy::redundant_closure_call)]
let span = $getter(item);
match span.source_id() {
Some(src_id) => src_id != program_id,
None => true,
}
});
)*
}
}
};
}

decl_engine_clear_module!(
(variable_slab, |item: &VariableDeclaration| item.name.span()),
(function_slab, |item: &FunctionDeclaration| item.name.span()),
(trait_slab, |item: &TraitDeclaration| item.name.span()),
(trait_fn_slab, |item: &TraitFn| item.name.span()),
(trait_type_slab, |item: &TraitTypeDeclaration| item
.name
.span()),
(impl_self_or_trait_slab, |item: &ImplSelfOrTrait| item
.block_span
.clone()),
(struct_slab, |item: &StructDeclaration| item.name.span()),
(storage_slab, |item: &StorageDeclaration| item.span.clone()),
(abi_slab, |item: &AbiDeclaration| item.name.span()),
(constant_slab, |item: &ConstantDeclaration| item.name.span()),
(enum_slab, |item: &EnumDeclaration| item.name.span()),
(type_alias_slab, |item: &TypeAliasDeclaration| item
.name
.span()),
);

impl ParsedDeclEngine {
/// Friendly helper method for calling the `get` method from the
/// implementation of [ParsedDeclEngineGet] for [ParsedDeclEngine]
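The `#[allow(clippy::redundant_closure_call)]` is needed because the macro calls each span-getter closure literal in place. A small stand-alone illustration of that shape (the `Item` type is hypothetical):

    struct Item {
        name: String,
    }

    fn main() {
        let item = Item { name: "main.sw".into() };
        // The macro invokes each span-getter closure literal immediately, which is
        // the exact shape `clippy::redundant_closure_call` warns about, hence the
        // allow attribute inside the macro body.
        #[allow(clippy::redundant_closure_call)]
        let name = (|it: &Item| it.name.clone())(&item);
        println!("{name}");
    }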
8 changes: 8 additions & 0 deletions sway-core/src/engine_threading.rs
@@ -48,6 +48,14 @@ impl Engines {
self.parsed_decl_engine.clear_program(program_id);
}

/// Removes all data associated with `source_id` from the declaration and type engines.
/// It is intended to be used during garbage collection to remove any data that is no longer needed.
pub fn clear_module(&mut self, source_id: &sway_types::SourceId) {
self.type_engine.clear_module(source_id);
self.decl_engine.clear_module(source_id);
self.parsed_decl_engine.clear_module(source_id);
}

/// Helps out some `thing: T` by adding `self` as context.
pub fn help_out<T>(&self, thing: T) -> WithEngines<'_, T> {
WithEngines {
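A possible usage sketch for `Engines::clear_module`, assuming a caller such as a language-server garbage-collection pass that holds a mutable `Engines` and a list of edited files (none of this caller code is part of the commit):

    // Hypothetical GC pass: every file edited since the last compile gets its
    // per-module engine data evicted so it can be rebuilt on the next compilation.
    fn collect_garbage(engines: &mut Engines, modified_source_ids: &[sway_types::SourceId]) {
        for source_id in modified_source_ids {
            // Drops typed declarations, parsed declarations, and types that
            // originated in this source file.
            engines.clear_module(source_id);
        }
    }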
2 changes: 1 addition & 1 deletion sway-core/src/language/ty/module.rs
@@ -23,7 +23,7 @@ pub struct TyModule {

#[derive(Clone, Debug)]
pub struct TySubmodule {
pub module: TyModule,
pub module: Arc<TyModule>,
pub mod_name_span: Span,
}

4 changes: 2 additions & 2 deletions sway-core/src/language/ty/program.rs
@@ -502,7 +502,7 @@ impl CollectTypesMetadata for TyProgram {
for module in std::iter::once(&self.root).chain(
self.root
.submodules_recursive()
.map(|(_, submod)| &submod.module),
.map(|(_, submod)| &*submod.module),
) {
for node in module.all_nodes.iter() {
let is_generic_function = node.is_generic_function(decl_engine);
@@ -531,7 +531,7 @@ impl CollectTypesMetadata for TyProgram {
for module in std::iter::once(&self.root).chain(
self.root
.submodules_recursive()
.map(|(_, submod)| &submod.module),
.map(|(_, submod)| &*submod.module),
) {
for node in module.all_nodes.iter() {
if node.is_test_function(decl_engine) {
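The switch from `&submod.module` to `&*submod.module` follows from `TySubmodule::module` becoming `Arc<TyModule>`; a minimal illustration of the same deref with a stand-in type:

    use std::sync::Arc;

    struct Module {
        name: String,
    }

    fn visit(module: &Module) {
        println!("visiting {}", module.name);
    }

    fn main() {
        let shared: Arc<Module> = Arc::new(Module { name: "root".into() });
        // `&shared` would be `&Arc<Module>`; `&*shared` goes through Arc's Deref
        // impl to yield a plain `&Module`, matching what the consumer expects.
        visit(&*shared);
    }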
143 changes: 87 additions & 56 deletions sway-core/src/lib.rs
@@ -33,7 +33,7 @@ use control_flow_analysis::ControlFlowGraph;
pub use debug_generation::write_dwarf;
use indexmap::IndexMap;
use metadata::MetadataManager;
use query_engine::{ModuleCacheKey, ModulePath, ProgramsCacheEntry};
use query_engine::{ModuleCacheKey, ModuleCommonInfo, ParsedModuleInfo, ProgramsCacheEntry};
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
@@ -236,7 +236,7 @@ fn parse_in_memory(

pub struct Submodule {
name: Ident,
path: ModulePath,
path: Arc<PathBuf>,
lexed: lexed::LexedSubmodule,
parsed: parsed::ParseSubmodule,
}
@@ -259,7 +259,6 @@ fn parse_submodules(
) -> Submodules {
// Assume the happy path, so there'll be as many submodules as dependencies, but no more.
let mut submods = Vec::with_capacity(module.submodules().count());

module.submodules().for_each(|submod| {
// Read the source code from the dependency.
// If we cannot, record as an error, but continue with other files.
@@ -275,7 +274,6 @@ fn parse_submodules(
return;
}
};

if let Ok(ParsedModuleTree {
tree_type: kind,
lexed_module,
@@ -318,7 +316,6 @@ fn parse_submodules(
submods.push(submodule);
}
});

submods
}

@@ -411,15 +408,19 @@ fn parse_module_tree(
let version = lsp_mode
.and_then(|lsp| lsp.file_versions.get(path.as_ref()).copied())
.unwrap_or(None);
let cache_entry = ModuleCacheEntry {
path,
modified_time,
hash,
dependencies,

let common_info = ModuleCommonInfo {
path: path.clone(),
include_tests,
dependencies,
hash,
};
let parsed_info = ParsedModuleInfo {
modified_time,
version,
};
query_engine.insert_parse_module_cache_entry(cache_entry);
let cache_entry = ModuleCacheEntry::new(common_info, parsed_info);
query_engine.update_or_insert_parsed_module_cache_entry(cache_entry);

Ok(ParsedModuleTree {
tree_type: kind,
@@ -428,59 +429,89 @@
})
}

fn is_parse_module_cache_up_to_date(
/// Checks if the typed module cache for a given path is up to date.
///
/// This function determines whether the cached typed representation of a module
/// is still valid based on file versions and dependencies.
///
/// Note: This functionality is currently only supported when the compiler is
/// initiated from the language server.
pub(crate) fn is_ty_module_cache_up_to_date(
engines: &Engines,
path: &Arc<PathBuf>,
include_tests: bool,
build_config: Option<&BuildConfig>,
) -> bool {
let query_engine = engines.qe();
let cache = engines.qe().module_cache.read();
let key = ModuleCacheKey::new(path.clone(), include_tests);
let entry = query_engine.get_parse_module_cache_entry(&key);
match entry {
Some(entry) => {
// Let's check if we can re-use the dependency information
// we got from the cache.
cache.get(&key).map_or(false, |entry| {
entry.typed.as_ref().map_or(false, |typed| {
// Check if the cache is up to date based on file versions
let cache_up_to_date = build_config
.as_ref()
.and_then(|x| x.lsp_mode.as_ref())
.and_then(|lsp| {
// First try to get the file version from lsp if it exists
lsp.file_versions.get(path.as_ref())
})
.map_or_else(
|| {
// Otherwise we can safely read the file from disk here, as the LSP has not modified it, or we are not in LSP mode.
// Check if the file has been modified or if its hash is the same as the last compilation
let modified_time = std::fs::metadata(path.as_path())
.ok()
.and_then(|m| m.modified().ok());
entry.modified_time == modified_time || {
let src = std::fs::read_to_string(path.as_path()).unwrap();
let mut hasher = DefaultHasher::new();
src.hash(&mut hasher);
let hash = hasher.finish();
hash == entry.hash
}
},
|version| {
// The cache is invalid if the lsp version is greater than the last compilation
!version.map_or(false, |v| v > entry.version.unwrap_or(0))
},
);

// Look at the dependencies recursively to make sure they have not been
// modified either.
if cache_up_to_date {
entry.dependencies.iter().all(|path| {
is_parse_module_cache_up_to_date(engines, path, include_tests, build_config)
.and_then(|lsp| lsp.file_versions.get(path.as_ref()))
.map_or(true, |version| {
version.map_or(true, |v| typed.version.map_or(false, |tv| v <= tv))
});

// If the cache is up to date, recursively check all dependencies
cache_up_to_date
&& entry.common.dependencies.iter().all(|dep_path| {
is_ty_module_cache_up_to_date(engines, dep_path, include_tests, build_config)
})
} else {
false
}
}
None => false,
}
})
})
}
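The nested `map_or` on version numbers above is compact but easy to misread; the same decision table written out with plain `Option`s (the `u64` version type is a stand-in, not necessarily the LSP's actual type):

    // Returns true when the cached typed module can still be used.
    fn typed_cache_is_fresh(lsp_version: Option<u64>, cached_version: Option<u64>) -> bool {
        match (lsp_version, cached_version) {
            // The editor reports no version for the file: nothing newer to beat.
            (None, _) => true,
            // The editor has a version but the cache never recorded one: stale.
            (Some(_), None) => false,
            // Both known: still fresh only if the editor's version is not newer.
            (Some(v), Some(cached)) => v <= cached,
        }
    }

    fn main() {
        assert!(typed_cache_is_fresh(None, None));
        assert!(!typed_cache_is_fresh(Some(3), None));
        assert!(typed_cache_is_fresh(Some(3), Some(3)));
        assert!(!typed_cache_is_fresh(Some(4), Some(3)));
    }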

/// Checks if the parsed module cache for a given path is up to date.
///
/// This function determines whether the cached parsed representation of a module
/// is still valid based on file versions, modification times, or content hashes.
pub(crate) fn is_parse_module_cache_up_to_date(
engines: &Engines,
path: &Arc<PathBuf>,
include_tests: bool,
build_config: Option<&BuildConfig>,
) -> bool {
let cache = engines.qe().module_cache.read();
let key = ModuleCacheKey::new(path.clone(), include_tests);
cache.get(&key).map_or(false, |entry| {
// Determine if the cached dependency information is still valid
let cache_up_to_date = build_config
.and_then(|x| x.lsp_mode.as_ref())
.and_then(|lsp| lsp.file_versions.get(path.as_ref()))
.map_or_else(
|| {
// If LSP mode is not active or file version is unavailable, fall back to filesystem checks.
let modified_time = std::fs::metadata(path.as_path())
.ok()
.and_then(|m| m.modified().ok());
// Check if modification time matches, or if not, compare file content hash
entry.parsed.modified_time == modified_time || {
let src = std::fs::read_to_string(path.as_path()).unwrap();
let mut hasher = DefaultHasher::new();
src.hash(&mut hasher);
hasher.finish() == entry.common.hash
}
},
|version| {
// Determine if the parse cache is up-to-date in LSP mode:
// - If there's no LSP file version (version is None), consider the cache up-to-date.
// - If there is an LSP file version:
// - If there's no cached version (entry.parsed.version is None), the cache is outdated.
// - If there's a cached version, compare them: cache is up-to-date if the LSP file version
// is not greater than the cached version.
version.map_or(true, |v| entry.parsed.version.map_or(false, |ev| v <= ev))
},
);

// If the cache is up to date, recursively check all dependencies to make sure they have not been
// modified either.
cache_up_to_date
&& entry.common.dependencies.iter().all(|dep_path| {
is_parse_module_cache_up_to_date(engines, dep_path, include_tests, build_config)
})
})
}
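The filesystem fallback above relies on a content hash computed with the standard library's `DefaultHasher`; a self-contained version of just that check (hypothetical helper names, and note the hash algorithm is not guaranteed stable across Rust releases, so stored hashes should not be persisted across toolchain upgrades):

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    use std::path::Path;

    // Hash the current file contents the same way the cache entry's hash was produced.
    fn source_hash(path: &Path) -> std::io::Result<u64> {
        let src = std::fs::read_to_string(path)?;
        let mut hasher = DefaultHasher::new();
        src.hash(&mut hasher);
        Ok(hasher.finish())
    }

    // The entry is considered unchanged if the stored hash still matches the file on disk.
    fn content_unchanged(path: &Path, cached_hash: u64) -> std::io::Result<bool> {
        Ok(source_hash(path)? == cached_hash)
    }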

fn module_path(
@@ -697,12 +728,12 @@ pub fn compile_to_ast(
retrigger_compilation: Option<Arc<AtomicBool>>,
) -> Result<Programs, ErrorEmitted> {
check_should_abort(handler, retrigger_compilation.clone())?;

let query_engine = engines.qe();
let mut metrics = PerformanceData::default();
if let Some(config) = build_config {
let path = config.canonical_root_module();
let include_tests = config.include_tests;

// Check if we can re-use the data in the cache.
if is_parse_module_cache_up_to_date(engines, &path, include_tests, build_config) {
let mut entry = query_engine.get_programs_cache_entry(&path).unwrap();