feat: precommit script

elijah-potter committed Feb 25, 2024
1 parent 9986ccd commit 377d199

Showing 50 changed files with 551 additions and 486 deletions.
22 changes: 22 additions & 0 deletions .github/workflows/precommit.yml
@@ -0,0 +1,22 @@
name: Precommit

on:
push:
branches: [ "master" ]
pull_request:
branches: [ "master" ]

env:
CARGO_TERM_COLOR: always

jobs:
precommit:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- name: Install Rust Nightly
run: rustup toolchain install nightly && rustup component add rustfmt --toolchain nightly
- name: Precommit
run: ./precommit.sh
25 changes: 0 additions & 25 deletions .github/workflows/rust-tests.yml

This file was deleted.

5 changes: 3 additions & 2 deletions README.md
@@ -3,8 +3,9 @@
<h1>Harper</h1>
</div>

[![Build](https://github.com/chilipepperhott/harper/actions/workflows/build.yml/badge.svg)](https://github.com/chilipepperhott/harper/actions/workflows/build.yml)
[![Rust Tests](https://github.com/chilipepperhott/harper/actions/workflows/rust-tests.yml/badge.svg)](https://github.com/chilipepperhott/harper/actions/workflows/rust-tests.yml)
[![Harper LS](https://github.com/chilipepperhott/harper/actions/workflows/build_harper_ls.yml/badge.svg)](https://github.com/chilipepperhott/harper/actions/workflows/build_harper_ls.yml)
[![Web](https://github.com/chilipepperhott/harper/actions/workflows/build_web.yml/badge.svg)](https://github.com/chilipepperhott/harper/actions/workflows/build_web.yml)
[![Precommit](https://github.com/chilipepperhott/harper/actions/workflows/precommit.yml/badge.svg)](https://github.com/chilipepperhott/harper/actions/workflows/precommit.yml)

Harper is an English grammar checker designed to be _just right._
I created it after years of dealing with the shortcomings of the competition.
40 changes: 18 additions & 22 deletions harper-core/src/document.rs
@@ -2,20 +2,16 @@ use std::fmt::Display;

use itertools::Itertools;

use crate::linting::Suggestion;
use crate::parsers::{Markdown, Parser, PlainEnglish};
use crate::TokenStringExt;
use crate::{
linting::Suggestion,
span::Span,
FatToken,
Punctuation::{self},
Token, TokenKind,
};
use crate::span::Span;
use crate::Punctuation::{self};
use crate::{FatToken, Token, TokenKind, TokenStringExt};

pub struct Document {
source: Vec<char>,
tokens: Vec<Token>,
parser: Box<dyn Parser>,
parser: Box<dyn Parser>
}

impl Default for Document {
@@ -33,7 +29,7 @@ impl Document {
let mut doc = Self {
source,
tokens: Vec::new(),
parser,
parser
};
doc.parse();

@@ -137,7 +133,7 @@ impl Document {
pub fn get_full_string(&self) -> String {
self.get_span_content_str(Span {
start: 0,
end: self.source.len(),
end: self.source.len()
})
}

@@ -165,8 +161,9 @@ impl Document {
self.parse();
}

/// Searches for quotation marks and fills the [`Punctuation::Quote::twin_loc`] field.
/// This is on a best effort basis.
/// Searches for quotation marks and fills the
/// [`Punctuation::Quote::twin_loc`] field. This is on a best effort
/// basis.
///
/// Current algorithm is very basic and could use some work.
fn match_quotes(&mut self) {
@@ -188,7 +185,8 @@
}
}

/// Searches for contractions and condenses them down into single self.tokens
/// Searches for contractions and condenses them down into single
/// self.tokens
fn condense_contractions(&mut self) {
if self.tokens.len() < 3 {
return;
@@ -233,7 +231,7 @@
&old[0..replace_starts
.first()
.copied()
.unwrap_or(replace_starts.len())],
.unwrap_or(replace_starts.len())]
);

let mut iter = replace_starts.iter().peekable();
@@ -251,7 +249,7 @@
&old[replace_starts
.last()
.map(|v| v + 3)
.unwrap_or(replace_starts.len())..],
.unwrap_or(replace_starts.len())..]
)
}
}
@@ -321,21 +319,19 @@ fn is_sentence_terminator(token: &TokenKind) -> bool {
TokenKind::Punctuation(punct) => [
Punctuation::Period,
Punctuation::Bang,
Punctuation::Question,
Punctuation::Question
]
.contains(punct),
TokenKind::Newline(_) => true,
_ => false,
_ => false
}
}

#[cfg(test)]
mod tests {
use super::Document;
use crate::{
parsers::{Markdown, PlainEnglish},
token::TokenStringExt,
};
use crate::parsers::{Markdown, PlainEnglish};
use crate::token::TokenStringExt;

#[test]
fn parses_sentences_correctly() {
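
The doc comment on `match_quotes` above describes a best-effort pass that records each quotation mark's partner in its `twin_loc` field. As a rough, self-contained sketch of that idea only (the simplified `QuoteTok` type is hypothetical, and this is not Harper's actual implementation), consecutive quote tokens could be paired like so:

/// Hypothetical, simplified stand-in for a token that may be a quote.
struct QuoteTok {
    is_quote: bool,
    /// Index of the matching quote, if one was found.
    twin_loc: Option<usize>,
}

/// Pair quote tokens on a best-effort basis: the 1st and 2nd quotes
/// become twins, then the 3rd and 4th, and so on.
fn pair_quotes(tokens: &mut [QuoteTok]) {
    let quote_indices: Vec<usize> = tokens
        .iter()
        .enumerate()
        .filter(|(_, t)| t.is_quote)
        .map(|(i, _)| i)
        .collect();

    for pair in quote_indices.chunks_exact(2) {
        tokens[pair[0]].twin_loc = Some(pair[1]);
        tokens[pair[1]].twin_loc = Some(pair[0]);
    }
}

A trailing unpaired quote simply keeps `twin_loc: None`, which is one reasonable reading of the "best effort" wording.
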
23 changes: 10 additions & 13 deletions harper-core/src/lexing/email_address.rs
@@ -1,8 +1,7 @@
use itertools::Itertools;

use crate::TokenKind;

use super::FoundToken;
use crate::TokenKind;

pub fn lex_email_address(source: &[char]) -> Option<FoundToken> {
// Location of the @ sign
@@ -31,7 +30,7 @@ pub fn lex_email_address(source: &[char]) -> Option<FoundToken> {

Some(FoundToken {
next_index: at_loc + 1 + domain_part_len,
token: TokenKind::EmailAddress,
token: TokenKind::EmailAddress
})
}

@@ -103,7 +102,7 @@ fn valid_unquoted_character(c: char) -> bool {

let others = [
'!', '#', '$', '%', '&', '\'', '*', '+', '-', '/', '=', '?', '^', '_', '`', '{', '|', '}',
'~', '.',
'~', '.'
];

if others.contains(&c) {
@@ -136,9 +135,8 @@ fn validate_hostname(source: &[char]) -> bool {

#[cfg(test)]
mod tests {
use crate::lexing::email_address::validate_hostname;

use super::{lex_email_address, validate_local_part};
use crate::lexing::email_address::validate_hostname;

fn example_local_parts() -> impl Iterator<Item = Vec<char>> {
[
@@ -158,7 +156,7 @@
r#"user-"#,
r#"postmaster"#,
r#"postmaster"#,
r#"_test"#,
r#"_test"#
]
.into_iter()
.map(|s| s.chars().collect())
@@ -179,12 +177,11 @@
r#"example.org"#,
r#"strange.example.com"#,
r#"example.org"#,
r#"example.org"#,
// The existing parser intentionally doesn't support IP addresses
// It simply isn't worth the effort at the moment.
// r#"[123.123.123.123]"#,
// r#"[IPv6:2001:0db8:85a3:0000:0000:8a2e:0370:7334]"#,
// r#"[IPv6:2001:0db8:85a3:0000:0000:8a2e:0370:7334]"#,
r#"example.org"# /* The existing parser intentionally doesn't support IP addresses
* It simply isn't worth the effort at the moment.
* r#"[123.123.123.123]"#,
* r#"[IPv6:2001:0db8:85a3:0000:0000:8a2e:0370:7334]"#,
* r#"[IPv6:2001:0db8:85a3:0000:0000:8a2e:0370:7334]"#, */
]
.into_iter()
.map(|s| s.chars().collect())
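
Given the signature `lex_email_address(source: &[char]) -> Option<FoundToken>` shown above, one more unit test could sit alongside the existing ones in this module's `tests` submodule. The sketch below is hypothetical and not part of the commit; it assumes the test lives inside that submodule and additionally imports `crate::TokenKind`:

#[test]
fn lexes_plain_address() {
    use crate::TokenKind;

    // Both the local part and the hostname appear in the fixtures above.
    let source: Vec<char> = "postmaster@example.org".chars().collect();
    let found = lex_email_address(&source).expect("address should lex");

    assert!(matches!(found.token, TokenKind::EmailAddress));
    // `next_index` is documented as the character *after* the lexed token,
    // so for a bare address it is expected to equal the input length.
    assert_eq!(found.next_index, source.len());
}
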
23 changes: 10 additions & 13 deletions harper-core/src/lexing/mod.rs
@@ -1,17 +1,14 @@
mod email_address;

use crate::token::Quote;

use crate::token::{Punctuation, TokenKind};

use self::email_address::lex_email_address;
use crate::token::{Punctuation, Quote, TokenKind};

#[derive(Debug)]
pub struct FoundToken {
/// The index of the character __after__ the lexed token
pub next_index: usize,
/// Token lexed
pub token: TokenKind,
pub token: TokenKind
}

pub fn lex_token(source: &[char]) -> Option<FoundToken> {
@@ -21,7 +18,7 @@ pub fn lex_token(source: &[char]) -> Option<FoundToken> {
lex_newlines,
lex_number,
lex_email_address,
lex_word,
lex_word
];

for lexer in lexers {
@@ -46,7 +43,7 @@ fn lex_word(source: &[char]) -> Option<FoundToken> {
} else {
return Some(FoundToken {
next_index: end + 1,
token: TokenKind::Word,
token: TokenKind::Word
});
}
}
@@ -78,7 +75,7 @@ pub fn lex_number(source: &[char]) -> Option<FoundToken> {
if let Ok(n) = s.parse::<f64>() {
return Some(FoundToken {
token: TokenKind::Number(n),
next_index: end + 1,
next_index: end + 1
});
}
}
@@ -92,7 +89,7 @@ fn lex_newlines(source: &[char]) -> Option<FoundToken> {
if count > 0 {
Some(FoundToken {
token: TokenKind::Newline(count),
next_index: count,
next_index: count
})
} else {
None
@@ -105,7 +102,7 @@ fn lex_spaces(source: &[char]) -> Option<FoundToken> {
if count > 0 {
Some(FoundToken {
token: TokenKind::Space(count),
next_index: count,
next_index: count
})
} else {
None
@@ -156,12 +153,12 @@ fn lex_punctuation(source: &[char]) -> Option<FoundToken> {
'$' => Dollar,
'|' => Pipe,
'_' => Underscore,
_ => return None,
_ => return None
};

Some(FoundToken {
next_index: 1,
token: TokenKind::Punctuation(punct),
token: TokenKind::Punctuation(punct)
})
}

@@ -171,7 +168,7 @@ fn lex_quote(source: &[char]) -> Option<FoundToken> {
if c == '\"' || c == '“' || c == '”' {
Some(FoundToken {
next_index: 1,
token: TokenKind::Punctuation(Punctuation::Quote(Quote { twin_loc: None })),
token: TokenKind::Punctuation(Punctuation::Quote(Quote { twin_loc: None }))
})
} else {
None
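
The `FoundToken` returned by `lex_token` carries `next_index`, the index of the character after the lexed token, which, judging from the lexers above, is relative to the slice that was passed in. A caller is therefore expected to advance by that amount and lex again. The following is a hedged sketch of such a driver loop, written as if it lived in this module; Harper's real tokenization loop is elsewhere in the crate and may differ:

/// Illustrative only: lex tokens one after another, advancing the
/// cursor by `next_index` after every hit.
fn lex_all(source: &[char]) -> Vec<TokenKind> {
    let mut cursor = 0;
    let mut tokens = Vec::new();

    while cursor < source.len() {
        match lex_token(&source[cursor..]) {
            Some(FoundToken { next_index, token }) => {
                tokens.push(token);
                // `next_index` is relative to the slice handed to `lex_token`,
                // so bump the absolute cursor by that amount.
                cursor += next_index;
            }
            // Nothing matched: skip one character so the loop always terminates.
            None => cursor += 1,
        }
    }

    tokens
}
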
3 changes: 1 addition & 2 deletions harper-core/src/lib.rs
@@ -9,8 +9,7 @@ mod spell;
mod token;

pub use document::Document;
pub use linting::LintSet;
pub use linting::{Lint, LintKind, Linter, Suggestion};
pub use linting::{Lint, LintKind, LintSet, Linter, Suggestion};
pub use span::Span;
pub use spell::{Dictionary, FullDictionary, MergedDictionary};
pub use token::{FatToken, Punctuation, Token, TokenKind, TokenStringExt};
8 changes: 4 additions & 4 deletions harper-core/src/linting/lint.rs
@@ -13,7 +13,7 @@ pub struct Lint {
pub message: String,
/// A numerical value for the importance of a lint.
/// Lower = more important.
pub priority: u8,
pub priority: u8
}

impl Default for Lint {
@@ -23,7 +23,7 @@ impl Default for Lint {
lint_kind: Default::default(),
suggestions: Default::default(),
message: Default::default(),
priority: 127,
priority: 127
}
}
}
@@ -36,12 +36,12 @@ pub enum LintKind {
Repetition,
Readability,
#[default]
Miscellaneous,
Miscellaneous
}

#[derive(Debug, Clone, Serialize, Deserialize, Is)]
pub enum Suggestion {
ReplaceWith(Vec<char>),
ReplaceWith(Vec<char>)
}

impl Display for Suggestion {
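
Since `lib.rs` re-exports `Lint`, `LintKind`, `Span`, and `Suggestion` (see its diff above), downstream code can assemble a lint from the pieces shown here. The example below is hypothetical: the span boundaries, message, and priority value are invented, and it assumes the remaining `Lint` and `Span` fields are public like the ones visible in this diff:

use harper_core::{Lint, LintKind, Span, Suggestion};

fn example_lint() -> Lint {
    Lint {
        span: Span { start: 12, end: 17 },
        lint_kind: LintKind::Repetition,
        suggestions: vec![Suggestion::ReplaceWith("the".chars().collect())],
        message: "This word is repeated.".to_string(),
        // Lower values mean more important; the `Default` impl above
        // settles on 127 as a neutral middle value.
        priority: 31,
    }
}
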