Significant TokenMap optimisation by removing clones and returning refs instead. #5509

Merged · 7 commits · Jan 25, 2024
Changes from all commits
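The PR replaces TokenMap query paths that cloned every (TokenIdent, Token) pair with paths that yield dashmap reference guards (RefMulti, or Ref from try_get), so callers borrow in place and clone only what they keep. A minimal sketch of the before/after pattern, assuming only a plain DashMap (the real TokenMap wraps a DashMap<TokenIdent, Token> behind its own query methods):

use dashmap::DashMap;

fn main() {
    let map: DashMap<String, u64> = DashMap::new();
    map.insert("foo".into(), 1);
    map.insert("bar".into(), 2);

    // Before: every query cloned the full (key, value) pair out of the map.
    let owned: Vec<(String, u64)> = map
        .iter()
        .map(|entry| (entry.key().clone(), *entry.value()))
        .collect();

    // After: iterate over RefMulti guards, borrow key/value in place, and
    // clone only the fields that must outlive the iteration.
    for entry in map.iter() {
        let (key, value) = entry.pair();
        println!("{key} = {value}");
    }
    assert_eq!(owned.len(), 2);
}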
27 changes: 13 additions & 14 deletions sway-lsp/src/capabilities/code_actions/diagnostic/auto_import.rs
@@ -40,19 +40,18 @@ pub(crate) fn import_code_action(
     let mut include_statements = Vec::<TyIncludeStatement>::new();
     let mut program_type_keyword = None;

-    ctx.tokens
-        .tokens_for_file(ctx.temp_uri)
-        .for_each(|(_, token)| {
-            if let Some(TypedAstToken::TypedUseStatement(use_stmt)) = token.typed {
-                use_statements.push(use_stmt);
-            } else if let Some(TypedAstToken::TypedIncludeStatement(include_stmt)) = token.typed {
-                include_statements.push(include_stmt);
-            } else if token.kind == SymbolKind::ProgramTypeKeyword {
-                if let AstToken::Keyword(ident) = token.parsed {
-                    program_type_keyword = Some(ident);
-                }
-            }
-        });
+    ctx.tokens.tokens_for_file(ctx.temp_uri).for_each(|item| {
+        if let Some(TypedAstToken::TypedUseStatement(use_stmt)) = &item.value().typed {
+            use_statements.push(use_stmt.clone());
+        } else if let Some(TypedAstToken::TypedIncludeStatement(include_stmt)) = &item.value().typed
+        {
+            include_statements.push(include_stmt.clone());
+        } else if item.value().kind == SymbolKind::ProgramTypeKeyword {
+            if let AstToken::Keyword(ident) = &item.value().parsed {
+                program_type_keyword = Some(ident.clone());
+            }
+        }
+    });

     // Create a list of code actions, one for each potential call path.
     let actions = call_paths
@@ -95,9 +94,9 @@ pub(crate) fn get_call_paths_for_name<'s>(
     let mut call_paths = ctx
         .tokens
         .tokens_for_name(symbol_name)
-        .filter_map(move |(_, token)| {
+        .filter_map(move |item| {
             // If the typed token is a declaration, then we can import it.
-            match token.typed.as_ref() {
+            match item.value().typed.as_ref() {
                 Some(TypedAstToken::TypedDeclaration(ty_decl)) => {
                     return match ty_decl {
                         TyDecl::StructDecl(decl) => {
5 changes: 3 additions & 2 deletions sway-lsp/src/capabilities/code_actions/mod.rs
@@ -49,14 +49,15 @@ pub fn code_actions(
     temp_uri: &Url,
     diagnostics: &Vec<Diagnostic>,
 ) -> Option<CodeActionResponse> {
-    let (_, token) = session
+    let t = session
         .token_map()
         .token_at_position(temp_uri, range.start)?;
+    let token = t.value();

     let ctx = CodeActionContext {
         engines: &session.engines.read(),
         tokens: session.token_map(),
-        token: &token,
+        token,
         uri,
         temp_uri,
         diagnostics,
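Note the shape of the new call sites: the guard is bound to t first, and the &Token is borrowed from it, because the borrow can live only as long as the guard. A hedged sketch of that constraint with a bare DashMap (get and its Ref guard stand in for token_at_position here):

use dashmap::DashMap;

// A Ref guard must outlive every `&V` borrowed from it, which is why the
// call sites above bind the guard to `t` before taking `t.value()`.
fn value_len(map: &DashMap<u32, String>, key: u32) -> Option<usize> {
    let guard = map.get(&key)?; // Ref<'_, u32, String>, holds a shard lock
    let value: &String = guard.value(); // borrow tied to `guard`
    Some(value.len())
} // `guard` drops here, releasing the shard lock

fn main() {
    let map = DashMap::new();
    map.insert(1, "token".to_string());
    assert_eq!(value_len(&map, 1), Some(5));
}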
@@ -30,10 +30,10 @@ impl<'a> CodeAction<'a, TyStructDecl> for StructNewCodeAction<'a> {
             .tokens
             .iter()
             .all_references_of_token(ctx.token, ctx.engines)
-            .find_map(|(_, token)| {
+            .find_map(|item| {
                 if let Some(TypedAstToken::TypedDeclaration(ty::TyDecl::ImplTrait(
                     ty::ImplTrait { decl_id, .. },
-                ))) = token.typed
+                ))) = item.value().typed
                 {
                     Some((*ctx.engines.de().get_impl_trait(&decl_id)).clone())
                 } else {
19 changes: 9 additions & 10 deletions sway-lsp/src/capabilities/document_symbol.rs
@@ -1,18 +1,17 @@
 use crate::core::token::{SymbolKind, Token, TokenIdent};
+use dashmap::mapref::multiple::RefMulti;
 use lsp_types::{self, Location, SymbolInformation, Url};

-pub fn to_symbol_information<I>(tokens: I, url: Url) -> Vec<SymbolInformation>
+pub fn to_symbol_information<'a, I>(tokens: I, url: Url) -> Vec<SymbolInformation>
 where
-    I: Iterator<Item = (TokenIdent, Token)>,
+    I: Iterator<Item = RefMulti<'a, TokenIdent, Token>>,
 {
-    let mut symbols: Vec<SymbolInformation> = vec![];
-
-    for (ident, token) in tokens {
-        let symbol = symbol_info(&ident, &token, url.clone());
-        symbols.push(symbol)
-    }
-
-    symbols
+    tokens
+        .map(|entry| {
+            let (ident, token) = entry.pair();
+            symbol_info(ident, token, url.clone())
+        })
+        .collect()
 }

 /// Given a `token::SymbolKind`, return the `lsp_types::SymbolKind` that corresponds to it.
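With the new bound, any iterator of DashMap guards satisfies I, so call sites can pass the map's iterators straight through. A hedged sketch of the same generic shape over a plain DashMap (names here are illustrative, not from the PR):

use dashmap::{mapref::multiple::RefMulti, DashMap};

// Same shape as to_symbol_information: generic over an iterator of guards.
fn collect_keys<'a, I>(entries: I) -> Vec<String>
where
    I: Iterator<Item = RefMulti<'a, String, u64>>,
{
    entries.map(|entry| entry.key().clone()).collect()
}

fn main() {
    let map: DashMap<String, u64> = DashMap::new();
    map.insert("ident".into(), 7);
    // `iter()` yields RefMulti guards, so it satisfies the bound directly.
    let keys = collect_keys(map.iter());
    assert_eq!(keys, vec!["ident".to_string()]);
}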
27 changes: 12 additions & 15 deletions sway-lsp/src/capabilities/hover/mod.rs
@@ -27,7 +27,8 @@ pub fn hover_data(
     url: Url,
     position: Position,
 ) -> Option<lsp_types::Hover> {
-    let (ident, token) = session.token_map().token_at_position(&url, position)?;
+    let t = session.token_map().token_at_position(&url, position)?;
+    let (ident, token) = t.pair();
     let range = ident.range;

     // check if our token is a keyword
@@ -50,26 +51,22 @@
         });
     }

-    let (decl_ident, decl_token) = match token.declared_token_ident(&session.engines.read()) {
+    let contents = match &token.declared_token_ident(&session.engines.read()) {
         Some(decl_ident) => {
-            let decl_token = session
-                .token_map()
-                .try_get(&decl_ident)
-                .try_unwrap()
-                .map(|item| item.value().clone())?;
-            (decl_ident, decl_token)
+            let t = session.token_map().try_get(decl_ident).try_unwrap()?;
+            let decl_token = t.value();
+            hover_format(
+                session.clone(),
+                &session.engines.read(),
+                decl_token,
+                &decl_ident.name,
+            )
         }
         // The `TypeInfo` of the token does not contain an `Ident`. In this case,
         // we use the `Ident` of the token itself.
-        None => (ident, token),
+        None => hover_format(session.clone(), &session.engines.read(), token, &ident.name),
     };

-    let contents = hover_format(
-        session.clone(),
-        &session.engines.read(),
-        &decl_token,
-        &decl_ident.name,
-    );
     Some(lsp_types::Hover {
         contents,
         range: Some(range),
3 changes: 2 additions & 1 deletion sway-lsp/src/capabilities/inlay_hints.rs
@@ -41,7 +41,8 @@ pub fn inlay_hints(
     let hints: Vec<lsp_types::InlayHint> = session
         .token_map()
         .tokens_for_file(uri)
-        .filter_map(|(_, token)| {
+        .filter_map(|item| {
+            let token = item.value();
             token.typed.as_ref().and_then(|t| match t {
                 TypedAstToken::TypedDeclaration(TyDecl::VariableDecl(var_decl)) => {
                     match var_decl.type_ascription.call_path_tree {
42 changes: 23 additions & 19 deletions sway-lsp/src/capabilities/rename.rs
@@ -36,10 +36,11 @@ pub fn rename(
     }

     // Get the token at the current cursor position
-    let (_, token) = session
+    let t = session
        .token_map()
        .token_at_position(&url, position)
        .ok_or(RenameError::TokenNotFound)?;
+    let token = t.value();

    // We don't currently allow renaming of module names.
    if token.kind == SymbolKind::Module {
@@ -57,8 +58,8 @@
             session
                 .token_map()
                 .iter()
-                .all_references_of_token(&token, &session.engines.read())
-                .map(|(ident, _)| ident)
+                .all_references_of_token(token, &session.engines.read())
+                .map(|item| item.key().clone())
                 .collect::<Vec<TokenIdent>>()
         })
         .into_iter()
@@ -72,8 +73,8 @@
             // taking the r# tokens into account.
             range.start.character -= RAW_IDENTIFIER.len() as u32;
         }
-        if let Some(path) = ident.path {
-            let url = get_url_from_path(&path).ok()?;
+        if let Some(path) = &ident.path {
+            let url = get_url_from_path(path).ok()?;
             if let Some(url) = session.sync.to_workspace_url(url) {
                 let edit = TextEdit::new(range, new_name.clone());
                 return Some((url, vec![edit]));
@@ -101,14 +102,15 @@ pub fn prepare_rename(
     url: Url,
     position: Position,
 ) -> Result<PrepareRenameResponse, LanguageServerError> {
-    let (ident, token) = session
+    let t = session
        .token_map()
        .token_at_position(&url, position)
        .ok_or(RenameError::TokenNotFound)?;
+    let (ident, token) = t.pair();

    // Only let through tokens that are in the users workspace.
    // tokens that are external to the users workspace cannot be renamed.
-    let _ = is_token_in_workspace(&session, &session.engines.read(), &token)?;
+    let _ = is_token_in_workspace(&session, &session.engines.read(), token)?;

    // Make sure we don't allow renaming of tokens that
    // are keywords or intrinsics.
@@ -128,7 +130,7 @@

     Ok(PrepareRenameResponse::RangeWithPlaceholder {
         range: ident.range,
-        placeholder: formatted_name(&ident),
+        placeholder: formatted_name(ident),
     })
 }

@@ -154,7 +156,7 @@ fn is_token_in_workspace(

     // Check the span of the tokens defintions to determine if it's in the users workspace.
     let temp_path = &session.sync.temp_dir()?;
-    if let Some(path) = decl_ident.path {
+    if let Some(path) = &decl_ident.path {
         if !path.starts_with(temp_path) {
             return Err(LanguageServerError::RenameError(
                 RenameError::TokenNotPartOfWorkspace,
@@ -166,9 +168,9 @@
         }
     }

 /// Returns a `Vec<Ident>` containing the identifiers of all trait functions found.
-fn trait_interface_idents(
-    interface_surface: &[ty::TyTraitInterfaceItem],
-    se: &SourceEngine,
+fn trait_interface_idents<'a>(
+    interface_surface: &'a [ty::TyTraitInterfaceItem],
+    se: &'a SourceEngine,
 ) -> Vec<TokenIdent> {
     interface_surface
         .iter()
@@ -180,23 +182,25 @@ fn find_all_methods_for_decl(
 }

 /// Returns the `Ident`s of all methods found for an `AbiDecl`, `TraitDecl`, or `ImplTrait`.
-fn find_all_methods_for_decl(
-    session: &Session,
-    engines: &Engines,
-    url: &Url,
+fn find_all_methods_for_decl<'a>(
+    session: &'a Session,
+    engines: &'a Engines,
+    url: &'a Url,
     position: Position,
 ) -> Result<Vec<TokenIdent>, LanguageServerError> {
     // Find the parent declaration
-    let (_, decl_token) = session
+    let t = session
         .token_map()
         .parent_decl_at_position(engines.se(), url, position)
         .ok_or(RenameError::TokenNotFound)?;
+    let decl_token = t.value();

     let idents = session
         .token_map()
         .iter()
-        .all_references_of_token(&decl_token, engines)
-        .filter_map(|(_, token)| {
+        .all_references_of_token(decl_token, engines)
+        .filter_map(|item| {
+            let token = item.value();
             token.typed.as_ref().and_then(|typed| match typed {
                 TypedAstToken::TypedDeclaration(decl) => match decl {
                     ty::TyDecl::AbiDecl(ty::AbiDecl { decl_id, .. }) => {
68 changes: 37 additions & 31 deletions sway-lsp/src/capabilities/semantic_tokens.rs
@@ -2,6 +2,7 @@ use crate::core::{
     session::Session,
     token::{SymbolKind, Token, TokenIdent},
 };
+use dashmap::mapref::multiple::RefMulti;
 use lsp_types::{
     Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
     SemanticTokensRangeResult, SemanticTokensResult, Url,
@@ -15,9 +16,9 @@ use std::sync::{

 /// Get the semantic tokens for the entire file.
 pub fn semantic_tokens_full(session: Arc<Session>, url: &Url) -> Option<SemanticTokensResult> {
-    let mut tokens: Vec<_> = session.token_map().tokens_for_file(url).collect();
-    sort_tokens(&mut tokens);
-    Some(semantic_tokens(&tokens).into())
+    let tokens: Vec<_> = session.token_map().tokens_for_file(url).collect();
+    let sorted_tokens_refs = sort_tokens(&tokens);
+    Some(semantic_tokens(&sorted_tokens_refs[..]).into())
 }

 /// Get the semantic tokens within a range.
@@ -26,30 +27,52 @@ pub fn semantic_tokens_range(
     url: &Url,
     range: &Range,
 ) -> Option<SemanticTokensRangeResult> {
-    let mut tokens: Vec<_> = session
+    let tokens: Vec<_> = session
         .token_map()
         .tokens_for_file(url)
-        .filter(|t| {
+        .filter(|item| {
             // make sure the token_ident range is within the range that was passed in
-            let token_range = t.0.range;
+            let token_range = item.key().range;
             token_range.start >= range.start && token_range.end <= range.end
         })
         .collect();
-    sort_tokens(&mut tokens);
-    Some(semantic_tokens(&tokens).into())
+    let sorted_tokens_refs = sort_tokens(&tokens);
+    Some(semantic_tokens(&sorted_tokens_refs[..]).into())
 }

+pub fn semantic_tokens(tokens_sorted: &[&RefMulti<TokenIdent, Token>]) -> SemanticTokens {
+    static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
+    let id = TOKEN_RESULT_COUNTER
+        .fetch_add(1, Ordering::SeqCst)
+        .to_string();
+    let mut builder = SemanticTokensBuilder::new(id);
+
+    for entry in tokens_sorted.iter() {
+        let (ident, token) = entry.pair();
+        let ty = semantic_token_type(&token.kind);
+        let token_index = type_index(ty);
+        // TODO - improve with modifiers
+        let modifier_bitset = 0;
+        builder.push(ident.range, token_index, modifier_bitset);
+    }
+    builder.build()
+}
+
 /// Sort tokens by their span so each token is sequential.
 ///
 /// If this step isn't done, then the bit offsets used for the lsp_types::SemanticToken are incorrect.
-fn sort_tokens(tokens: &mut [(TokenIdent, Token)]) {
-    tokens.sort_by(|(a_span, _), (b_span, _)| {
-        let a = (a_span.range.start, a_span.range.end);
-        let b = (b_span.range.start, b_span.range.end);
-        a.cmp(&b)
+fn sort_tokens<'a>(
+    tokens: &'a [RefMulti<'a, TokenIdent, Token>],
+) -> Vec<&'a RefMulti<'a, TokenIdent, Token>> {
+    let mut refs: Vec<_> = tokens.iter().collect();
+    // Sort the vector of references based on the spans of the tokens
+    refs.sort_by(|a, b| {
+        let a_span = a.key().range;
+        let b_span = b.key().range;
+        (a_span.start, a_span.end).cmp(&(b_span.start, b_span.end))
     });
+    refs
 }

 //-------------------------------
 /// Tokens are encoded relative to each other.
 ///
@@ -108,23 +131,6 @@ impl SemanticTokensBuilder {
     }
 }

-pub fn semantic_tokens(tokens_sorted: &[(TokenIdent, Token)]) -> SemanticTokens {
-    static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
-    let id = TOKEN_RESULT_COUNTER
-        .fetch_add(1, Ordering::SeqCst)
-        .to_string();
-    let mut builder = SemanticTokensBuilder::new(id);
-
-    for (ident, token) in tokens_sorted.iter() {
-        let ty = semantic_token_type(&token.kind);
-        let token_index = type_index(ty);
-        // TODO - improve with modifiers
-        let modifier_bitset = 0;
-        builder.push(ident.range, token_index, modifier_bitset);
-    }
-    builder.build()
-}
-
 pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
     SemanticTokenType::STRING,
     SemanticTokenType::NUMBER,
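The doc comment on sort_tokens is the constraint that forces the collect-then-sort step: LSP semantic tokens are delta-encoded, so each position is stored relative to the previous token, and an unsorted stream would make the subtraction go negative. A hedged sketch of that encoding rule (not the SemanticTokensBuilder itself), with positions as (line, character) pairs:

/// Delta-encode (line, character) positions per the LSP spec: `delta_line`
/// is relative to the previous token, and `delta_start` is relative to the
/// previous token's start only when both tokens share a line.
fn delta_encode(sorted: &[(u32, u32)]) -> Vec<(u32, u32)> {
    let (mut prev_line, mut prev_start) = (0u32, 0u32);
    sorted
        .iter()
        .map(|&(line, start)| {
            let delta_line = line - prev_line; // underflows if input is unsorted
            let delta_start = if delta_line == 0 { start - prev_start } else { start };
            (prev_line, prev_start) = (line, start);
            (delta_line, delta_start)
        })
        .collect()
}

fn main() {
    // Tokens at 2:5, 2:10 and 3:2 encode as (2,5), (0,5), (1,2).
    assert_eq!(delta_encode(&[(2, 5), (2, 10), (3, 2)]), vec![(2, 5), (0, 5), (1, 2)]);
}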