Skip to content

Commit

Permalink
[Bug] Don't recalculate first for HashSet
Browse files Browse the repository at this point in the history
What?
=====

A `HashSet` has no deterministic iteration order, so recomputing the set
of defined paths on every call meant the "first path" could differ between
calls. This change captures the paths once, ensuring that calculating the
first path from the stored set is consistent across calls.
  • Loading branch information
joshuaclayton committed Apr 19, 2020
1 parent c54571d commit 699719d
Show file tree
Hide file tree
Showing 4 changed files with 20 additions and 18 deletions.
2 changes: 1 addition & 1 deletion crates/project_configuration/src/value_assertion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ impl Assertion {
match self {
Assertion::PathAssertion(matcher) => token_search_result
.token
.defined_paths()
.defined_paths
.iter()
.any(|path| matcher.check(path)),
Assertion::TokenAssertion(matcher) => matcher.check(&token_search_result.token.token),
Expand Down
8 changes: 4 additions & 4 deletions crates/token_analysis/src/usage_likelihood.rs
Original file line number Diff line number Diff line change
Expand Up @@ -100,16 +100,16 @@ mod tests {
use token_search::Token;

fn build_ruby_file(token: &str, path: &str, kind: TokenKind) -> Token {
Token {
token: token.to_string(),
definitions: vec![CtagItem {
Token::new(
token.to_string(),
vec![CtagItem {
name: token.to_string(),
file_path: path.to_string(),
language: Some(Language::Ruby),
tags: HashMap::new(),
kind: kind,
}],
}
)
}

#[test]
Expand Down
26 changes: 14 additions & 12 deletions crates/token_search/src/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,24 +7,29 @@ use std::collections::HashSet;
pub struct Token {
pub token: String,
pub definitions: Vec<CtagItem>,
pub defined_paths: HashSet<String>,
}

impl Token {
pub fn new(token: String, definitions: Vec<CtagItem>) -> Self {
Self {
token,
definitions: definitions.to_vec(),
defined_paths: definitions
.iter()
.map(|v| v.file_path.to_string())
.collect(),
}
}

pub fn all() -> Result<Vec<Token>, ReadCtagsError> {
TagsReader::default()
.load()
.map(Self::build_tokens_from_outcome)
}

pub fn defined_paths(&self) -> HashSet<String> {
self.definitions
.iter()
.map(|v| v.file_path.to_string())
.collect()
}

pub fn first_path(&self) -> String {
self.defined_paths().iter().nth(0).unwrap().to_string()
self.defined_paths.iter().nth(0).unwrap().to_string()
}

pub fn languages(&self) -> Vec<Language> {
Expand All @@ -44,10 +49,7 @@ impl Token {
.sorted_by_key(|ct| Self::strip_prepended_punctuation(&ct.name))
.group_by(|ct| Self::strip_prepended_punctuation(&ct.name))
.into_iter()
.map(|(token, cts)| Token {
token,
definitions: cts.collect(),
})
.map(|(token, cts)| Token::new(token, cts.collect()))
.collect()
}

Expand Down
2 changes: 1 addition & 1 deletion crates/token_search/src/token_search.rs
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,7 @@ pub struct TokenSearchResult {

impl TokenSearchResult {
pub fn defined_paths(&self) -> HashSet<String> {
self.token.defined_paths()
self.token.defined_paths.clone()
}

pub fn occurred_paths(&self) -> HashSet<String> {
Expand Down

0 comments on commit 699719d

Please sign in to comment.