Enforce rustfmt format #12

Merged · 3 commits · Jan 27, 2018
15 changes: 13 additions & 2 deletions .travis.yml
@@ -1,3 +1,14 @@
language: rust
rust:
- stable

matrix:
include:

- rust: nightly-2018-01-26
before_script:
- rustup component add rustfmt-preview
script:
- cargo fmt -- --write-mode=diff

- rust: stable
script:
- cargo test
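The matrix above splits CI into two jobs: a pinned nightly (nightly-2018-01-26) that installs rustfmt-preview and fails the build if formatting would change anything, and a stable job that keeps running the tests. A contributor could presumably reproduce the formatting gate locally with the same two commands taken from the config, assuming the matching nightly toolchain is active:

rustup component add rustfmt-preview
cargo fmt -- --write-mode=diff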
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -13,4 +13,4 @@ file = "1.1.1"
ron = "0.1.5"

[dev-dependencies]
testutils = { path = "./tests/testutils" }
testutils = { path = "./tests/testutils" }
Empty file added: rustfmt.toml
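Adding rustfmt.toml as an empty file pins the project to rustfmt's default style. If the defaults ever needed adjusting, overrides would go in this file; a purely hypothetical example (not part of this PR, which deliberately keeps the defaults):

max_width = 100  # hypothetical override; the PR leaves the file empty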
21 changes: 13 additions & 8 deletions src/bin/gen.rs
@@ -2,8 +2,8 @@ extern crate serde;
#[macro_use]
extern crate serde_derive;

extern crate ron;
extern crate file;
extern crate ron;

use std::path::PathBuf;
use std::fmt::Write;
@@ -33,11 +33,12 @@ impl Grammar {
acc.push_str("use tree::{SyntaxKind, SyntaxInfo};\n");
acc.push_str("\n");

let syntax_kinds: Vec<String> =
self.keywords.iter().map(|kw| kw_token(kw))
.chain(self.tokens.iter().cloned())
.chain(self.nodes.iter().cloned())
.collect();
let syntax_kinds: Vec<String> = self.keywords
.iter()
.map(|kw| kw_token(kw))
.chain(self.tokens.iter().cloned())
.chain(self.nodes.iter().cloned())
.collect();

for (idx, kind) in syntax_kinds.iter().enumerate() {
let sname = scream(kind);
@@ -48,7 +49,11 @@ impl Grammar {
).unwrap();
}
acc.push_str("\n");
write!(acc, "static INFOS: [SyntaxInfo; {}] = [\n", syntax_kinds.len()).unwrap();
write!(
acc,
"static INFOS: [SyntaxInfo; {}] = [\n",
syntax_kinds.len()
).unwrap();
for kind in syntax_kinds.iter() {
let sname = scream(kind);
write!(
@@ -91,4 +96,4 @@ fn scream(word: &str) -> String {

fn kw_token(keyword: &str) -> String {
format!("{}_KW", scream(keyword))
}
}
2 changes: 1 addition & 1 deletion src/bin/parse-rust.rs
@@ -2,7 +2,7 @@ extern crate libsyntax2;

use std::io::Read;

use libsyntax2::{tokenize, parse};
use libsyntax2::{parse, tokenize};
use libsyntax2::utils::dump_tree;

fn main() {
9 changes: 2 additions & 7 deletions src/lexer/classes.rs
@@ -1,17 +1,12 @@
use unicode_xid::UnicodeXID;

pub fn is_ident_start(c: char) -> bool {
(c >= 'a' && c <= 'z')
|| (c >= 'A' && c <= 'Z')
|| c == '_'
(c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_'
|| (c > '\x7f' && UnicodeXID::is_xid_start(c))
}

pub fn is_ident_continue(c: char) -> bool {
(c >= 'a' && c <= 'z')
|| (c >= 'A' && c <= 'Z')
|| (c >= '0' && c <= '9')
|| c == '_'
(c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_'
|| (c > '\x7f' && UnicodeXID::is_xid_continue(c))
}

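The two predicates keep the same behavior after the reformat; only the line breaks change. A minimal sketch of a unit test for them (hypothetical, not part of the PR), assuming it sits in src/lexer/classes.rs next to the functions:

#[cfg(test)]
mod tests {
    use super::{is_ident_continue, is_ident_start};

    #[test]
    fn ident_boundaries() {
        assert!(is_ident_start('_'));
        assert!(!is_ident_start('1')); // digits cannot start an identifier
        assert!(is_ident_continue('1')); // but they may continue one
        assert!(is_ident_start('α')); // non-ASCII XID_Start chars pass too
    }
}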
5 changes: 2 additions & 3 deletions src/lexer/comments.rs
@@ -1,6 +1,6 @@
use lexer::ptr::Ptr;

use {SyntaxKind};
use SyntaxKind;
use syntax_kinds::*;

pub(crate) fn scan_shebang(ptr: &mut Ptr) -> bool {
@@ -23,7 +23,6 @@ pub(crate) fn scan_comment(ptr: &mut Ptr) -> Option<SyntaxKind> {
}
}


fn bump_until_eol(ptr: &mut Ptr) {
loop {
if ptr.next_is('\n') || ptr.next_is('\r') && ptr.nnext_is('\n') {
@@ -33,4 +32,4 @@ fn bump_until_eol(ptr: &mut Ptr) {
break;
}
}
}
}
145 changes: 79 additions & 66 deletions src/lexer/mod.rs
@@ -1,4 +1,4 @@
use {Token, SyntaxKind};
use {SyntaxKind, Token};
use syntax_kinds::*;

mod ptr;
@@ -11,10 +11,11 @@ mod numbers;
use self::numbers::scan_number;

mod strings;
use self::strings::{is_string_literal_start, scan_char, scan_byte_char_or_string, scan_string, scan_raw_string};
use self::strings::{is_string_literal_start, scan_byte_char_or_string, scan_char, scan_raw_string,
scan_string};

mod comments;
use self::comments::{scan_shebang, scan_comment};
use self::comments::{scan_comment, scan_shebang};

pub fn tokenize(text: &str) -> Vec<Token> {
let mut text = text;
@@ -45,10 +46,10 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
match c {
'#' => if scan_shebang(ptr) {
return SHEBANG;
}
},
'/' => if let Some(kind) = scan_comment(ptr) {
return kind;
}
},
_ => (),
}

@@ -89,79 +90,91 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
'%' => return PERCENT,

// Multi-byte tokens.
'.' => return match (ptr.next(), ptr.nnext()) {
(Some('.'), Some('.')) => {
ptr.bump();
ptr.bump();
DOTDOTDOT
},
(Some('.'), Some('=')) => {
ptr.bump();
ptr.bump();
DOTDOTEQ
},
(Some('.'), _) => {
ptr.bump();
DOTDOT
},
_ => DOT
},
':' => return match ptr.next() {
Some(':') => {
ptr.bump();
COLONCOLON
'.' => {
return match (ptr.next(), ptr.nnext()) {
(Some('.'), Some('.')) => {
ptr.bump();
ptr.bump();
DOTDOTDOT
}
(Some('.'), Some('=')) => {
ptr.bump();
ptr.bump();
DOTDOTEQ
}
(Some('.'), _) => {
ptr.bump();
DOTDOT
}
_ => DOT,
}
_ => COLON
},
'=' => return match ptr.next() {
Some('=') => {
ptr.bump();
EQEQ
}
':' => {
return match ptr.next() {
Some(':') => {
ptr.bump();
COLONCOLON
}
_ => COLON,
}
Some('>') => {
ptr.bump();
FAT_ARROW
}
'=' => {
return match ptr.next() {
Some('=') => {
ptr.bump();
EQEQ
}
Some('>') => {
ptr.bump();
FAT_ARROW
}
_ => EQ,
}
_ => EQ,
},
'!' => return match ptr.next() {
Some('=') => {
}
'!' => {
return match ptr.next() {
Some('=') => {
ptr.bump();
NEQ
}
_ => EXCL,
}
}
'-' => {
return if ptr.next_is('>') {
ptr.bump();
NEQ
THIN_ARROW
} else {
MINUS
}
_ => EXCL,
},
'-' => return if ptr.next_is('>') {
ptr.bump();
THIN_ARROW
} else {
MINUS
},
}

// If the character is an ident start not followed by another single
// quote, then this is a lifetime name:
'\'' => return if ptr.next_is_p(is_ident_start) && !ptr.nnext_is('\'') {
ptr.bump();
while ptr.next_is_p(is_ident_continue) {
ptr.bump();
}
// lifetimes shouldn't end with a single quote
// if we find one, then this is an invalid character literal
if ptr.next_is('\'') {
'\'' => {
return if ptr.next_is_p(is_ident_start) && !ptr.nnext_is('\'') {
ptr.bump();
return CHAR; // TODO: error reporting
}
LIFETIME
} else {
scan_char(ptr);
scan_literal_suffix(ptr);
CHAR
},
while ptr.next_is_p(is_ident_continue) {
ptr.bump();
}
// lifetimes shouldn't end with a single quote
// if we find one, then this is an invalid character literal
if ptr.next_is('\'') {
ptr.bump();
return CHAR; // TODO: error reporting
}
LIFETIME
} else {
scan_char(ptr);
scan_literal_suffix(ptr);
CHAR
};
}
'b' => {
let kind = scan_byte_char_or_string(ptr);
scan_literal_suffix(ptr);
return kind
},
return kind;
}
'"' => {
scan_string(ptr);
scan_literal_suffix(ptr);
6 changes: 3 additions & 3 deletions src/lexer/numbers.rs
@@ -1,7 +1,7 @@
use lexer::ptr::Ptr;
use lexer::classes::*;

use {SyntaxKind};
use SyntaxKind;
use syntax_kinds::*;

pub(crate) fn scan_number(c: char, ptr: &mut Ptr) -> SyntaxKind {
@@ -49,10 +49,10 @@ fn scan_digits(ptr: &mut Ptr, allow_hex: bool) {
'_' | '0'...'9' => {
ptr.bump();
}
'a'...'f' | 'A' ... 'F' if allow_hex => {
'a'...'f' | 'A'...'F' if allow_hex => {
ptr.bump();
}
_ => return
_ => return,
}
}
}
11 changes: 7 additions & 4 deletions src/lexer/ptr.rs
@@ -1,4 +1,4 @@
use {TextUnit};
use TextUnit;

use std::str::Chars;

@@ -9,7 +9,10 @@ pub(crate) struct Ptr<'s> {

impl<'s> Ptr<'s> {
pub fn new(text: &'s str) -> Ptr<'s> {
Ptr { text, len: TextUnit::new(0) }
Ptr {
text,
len: TextUnit::new(0),
}
}

pub fn into_len(self) -> TextUnit {
@@ -53,7 +56,7 @@ impl<'s> Ptr<'s> {
match self.next() {
Some(c) if pred(c) => {
self.bump();
},
}
_ => return,
}
}
@@ -66,6 +69,6 @@

fn chars(&self) -> Chars {
let len: u32 = self.len.into();
self.text[len as usize ..].chars()
self.text[len as usize..].chars()
}
}