Auto merge of rust-lang#79944 - sivadeilra:syms_proc_macro_testing, r=petrochenkov

Improve error handling in `symbols` proc-macro

This improves how the `symbols` proc-macro handles errors.
If it finds an error in its input, the macro no longer panics.
Instead, it still produces an output token stream, and that
token stream contains `compile_error!(...)` macro invocations.
Compilation still fails (which is what we want), but we avoid
the cascade of meaningless errors that would otherwise result
from the output not containing the symbols that the macro
normally generates.
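
For reference, the pattern is the standard `syn` one: collect
`syn::Error` values while processing the input, then turn each one
into a `compile_error!(...)` invocation with `to_compile_error()`
and append it to the generated stream. A minimal sketch (the
function names here are illustrative, not the ones in the diff
below):

```rust
use proc_macro2::{Span, TokenStream};
use quote::quote;

// Hypothetical generator: produce output plus any errors found in the input.
fn generate(_input: TokenStream) -> (TokenStream, Vec<syn::Error>) {
    let output = quote! { pub const GENERATED: u32 = 1; };
    let errors = vec![syn::Error::new(Span::call_site(), "example error")];
    (output, errors)
}

fn emit(input: TokenStream) -> TokenStream {
    let (mut output, errors) = generate(input);
    // Each error expands to `compile_error!("...")` at its own span, so the
    // build still fails, but the generated items remain available to the
    // rest of the crate.
    output.extend(errors.into_iter().map(|e| e.to_compile_error()));
    output
}
```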

This solves a small (but annoying) problem. When you're editing
rustc_span/src/symbol.rs and get something wrong (a duplicated
symbol name, a misordered symbol), you want to see only the
relevant errors, not a burst of irrelevant ones. This change
also uses the correct Span when reporting errors, so the errors
point at the exact place in rustc_span/src/symbol.rs that is
wrong.
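
The duplicate check that makes this possible just remembers the
span of each name's first occurrence. A hedged sketch of that
pattern (written as a standalone function rather than the closure
used in the actual diff):

```rust
use proc_macro2::Span;
use std::collections::HashMap;

// Sketch: report a duplicate at its own span and also point back at the
// first occurrence, instead of emitting a plain string with no location.
fn check_dup(
    seen: &mut HashMap<String, Span>,
    span: Span,
    name: &str,
    errors: &mut Vec<syn::Error>,
) {
    if let Some(prev_span) = seen.get(name) {
        errors.push(syn::Error::new(span, format!("Symbol `{}` is duplicated", name)));
        errors.push(syn::Error::new(*prev_span, "location of previous definition"));
    } else {
        seen.insert(name.to_string(), span);
    }
}
```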

This also adds several unit tests which test the `symbols` proc-macro.

This commit also makes it easy to run the `symbols` proc-macro
as an ordinary Cargo test: just run `cargo test`. This makes it
easier to develop the macro itself, for example by running it
under a debugger.
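
What makes this possible is the one-line change in
`rustc_macros/src/lib.rs` below: the real work happens in a
function over `proc_macro2::TokenStream`, which (unlike
`proc_macro::TokenStream`) is available outside a proc-macro
expansion context. A rough sketch of the pattern, with made-up
names:

```rust
use proc_macro2::TokenStream;
use quote::quote;

// The macro's logic lives in a plain function over proc_macro2 types ...
fn my_macro_impl(_input: TokenStream) -> TokenStream {
    quote! { pub fn generated() {} }
}

// ... and the `#[proc_macro]` entry point (in the crate root) is just a thin
// wrapper that converts at the boundary:
//
//     #[proc_macro]
//     pub fn my_macro(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
//         my_macro_impl(input.into()).into()
//     }

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn runs_in_an_ordinary_test() {
        // proc_macro::TokenStream can't be constructed here, but proc_macro2 can.
        let output = my_macro_impl(quote! { some input });
        assert!(syn::parse2::<syn::File>(output).is_ok());
    }
}
```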

This commit also uses the `Punctuated` type in `syn` for parsing
comma-separated lists, rather than doing it manually.
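
For reference, `Punctuated::parse_terminated` consumes
`T (, T)* ,?` until the enclosing delimiter ends, so the
individual `Parse` impls no longer have to eat trailing commas
themselves. A small sketch of that parsing shape (the `Entry`
type here is a simplified stand-in for the real `Keyword` and
`Symbol` structs):

```rust
use syn::parse::{Parse, ParseStream, Result};
use syn::punctuated::Punctuated;
use syn::{braced, Ident, Token};

// Simplified stand-in for one entry inside the braces.
struct Entry {
    name: Ident,
}

impl Parse for Entry {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        // Note: no `input.parse::<Token![,]>()?` here; the separator is
        // handled by `Punctuated`.
        Ok(Entry { name: input.parse()? })
    }
}

struct BracedList {
    entries: Punctuated<Entry, Token![,]>,
}

impl Parse for BracedList {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let content;
        braced!(content in input);
        Ok(BracedList { entries: Punctuated::parse_terminated(&content)? })
    }
}
```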

The output of the macro is not changed at all by this commit,
so rustc should be completely unchanged. This just improves
quality of life during development.
bors committed Dec 14, 2020
2 parents 7feab00 + 1a5b9b0 commit 331e740
Showing 3 changed files with 204 additions and 54 deletions.
2 changes: 1 addition & 1 deletion compiler/rustc_macros/src/lib.rs
@@ -21,7 +21,7 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {

#[proc_macro]
pub fn symbols(input: TokenStream) -> TokenStream {
symbols::symbols(input)
symbols::symbols(input.into()).into()
}

decl_derive!([HashStable, attributes(stable_hasher)] => hash_stable::hash_stable_derive);
154 changes: 101 additions & 53 deletions compiler/rustc_macros/src/symbols.rs
@@ -1,8 +1,35 @@
use proc_macro::TokenStream;
//! Proc macro which builds the Symbol table
//!
//! # Debugging
//!
//! Since this proc-macro does some non-trivial work, debugging it is important.
//! This proc-macro can be invoked as an ordinary unit test, like so:
//!
//! ```bash
//! cd compiler/rustc_macros
//! cargo test symbols::test_symbols -- --nocapture
//! ```
//!
//! This unit test finds the `symbols!` invocation in `compiler/rustc_span/src/symbol.rs`
//! and runs it. It verifies that the output token stream can be parsed as valid module
//! items and that no errors were produced.
//!
//! You can also view the generated code by using `cargo expand`:
//!
//! ```bash
//! cargo install cargo-expand # this is necessary only once
//! cd compiler/rustc_span
//! cargo expand > /tmp/rustc_span.rs # it's a big file
//! ```
use proc_macro2::{Span, TokenStream};
use quote::quote;
use std::collections::HashSet;
use std::collections::HashMap;
use syn::parse::{Parse, ParseStream, Result};
use syn::{braced, parse_macro_input, Ident, LitStr, Token};
use syn::{braced, punctuated::Punctuated, Ident, LitStr, Token};

#[cfg(test)]
mod tests;

mod kw {
syn::custom_keyword!(Keywords);
@@ -19,7 +46,6 @@ impl Parse for Keyword {
let name = input.parse()?;
input.parse::<Token![:]>()?;
let value = input.parse()?;
input.parse::<Token![,]>()?;

Ok(Keyword { name, value })
}
@@ -37,78 +63,101 @@ impl Parse for Symbol {
Ok(_) => Some(input.parse()?),
Err(_) => None,
};
input.parse::<Token![,]>()?;

Ok(Symbol { name, value })
}
}

/// A type used to greedily parse another type until the input is empty.
struct List<T>(Vec<T>);

impl<T: Parse> Parse for List<T> {
fn parse(input: ParseStream<'_>) -> Result<Self> {
let mut list = Vec::new();
while !input.is_empty() {
list.push(input.parse()?);
}
Ok(List(list))
}
}

struct Input {
keywords: List<Keyword>,
symbols: List<Symbol>,
keywords: Punctuated<Keyword, Token![,]>,
symbols: Punctuated<Symbol, Token![,]>,
}

impl Parse for Input {
fn parse(input: ParseStream<'_>) -> Result<Self> {
input.parse::<kw::Keywords>()?;
let content;
braced!(content in input);
let keywords = content.parse()?;
let keywords = Punctuated::parse_terminated(&content)?;

input.parse::<kw::Symbols>()?;
let content;
braced!(content in input);
let symbols = content.parse()?;
let symbols = Punctuated::parse_terminated(&content)?;

Ok(Input { keywords, symbols })
}
}

#[derive(Default)]
struct Errors {
list: Vec<syn::Error>,
}

impl Errors {
fn error(&mut self, span: Span, message: String) {
self.list.push(syn::Error::new(span, message));
}
}

pub fn symbols(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as Input);
let (mut output, errors) = symbols_with_errors(input);

// If we generated any errors, then report them as compile_error!() macro calls.
// This lets the errors point back to the most relevant span. It also allows us
// to report as many errors as we can during a single run.
output.extend(errors.into_iter().map(|e| e.to_compile_error()));

output
}

fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec<syn::Error>) {
let mut errors = Errors::default();

let input: Input = match syn::parse2(input) {
Ok(input) => input,
Err(e) => {
// This allows us to display errors at the proper span, while minimizing
// unrelated errors caused by bailing out (and not generating code).
errors.list.push(e);
Input { keywords: Default::default(), symbols: Default::default() }
}
};

let mut keyword_stream = quote! {};
let mut symbols_stream = quote! {};
let mut digits_stream = quote! {};
let mut prefill_stream = quote! {};
let mut counter = 0u32;
let mut keys = HashSet::<String>::new();
let mut prev_key: Option<String> = None;
let mut errors = Vec::<String>::new();

let mut check_dup = |str: &str, errors: &mut Vec<String>| {
if !keys.insert(str.to_string()) {
errors.push(format!("Symbol `{}` is duplicated", str));
let mut keys =
HashMap::<String, Span>::with_capacity(input.keywords.len() + input.symbols.len() + 10);
let mut prev_key: Option<(Span, String)> = None;

let mut check_dup = |span: Span, str: &str, errors: &mut Errors| {
if let Some(prev_span) = keys.get(str) {
errors.error(span, format!("Symbol `{}` is duplicated", str));
errors.error(*prev_span, format!("location of previous definition"));
} else {
keys.insert(str.to_string(), span);
}
};

let mut check_order = |str: &str, errors: &mut Vec<String>| {
if let Some(ref prev_str) = prev_key {
let mut check_order = |span: Span, str: &str, errors: &mut Errors| {
if let Some((prev_span, ref prev_str)) = prev_key {
if str < prev_str {
errors.push(format!("Symbol `{}` must precede `{}`", str, prev_str));
errors.error(span, format!("Symbol `{}` must precede `{}`", str, prev_str));
errors.error(prev_span, format!("location of previous symbol `{}`", prev_str));
}
}
prev_key = Some(str.to_string());
prev_key = Some((span, str.to_string()));
};

// Generate the listed keywords.
for keyword in &input.keywords.0 {
for keyword in input.keywords.iter() {
let name = &keyword.name;
let value = &keyword.value;
check_dup(&value.value(), &mut errors);
let value_string = value.value();
check_dup(keyword.name.span(), &value_string, &mut errors);
prefill_stream.extend(quote! {
#value,
});
@@ -120,14 +169,15 @@ pub fn symbols(input: TokenStream) -> TokenStream {
}

// Generate the listed symbols.
for symbol in &input.symbols.0 {
for symbol in input.symbols.iter() {
let name = &symbol.name;
let value = match &symbol.value {
Some(value) => value.value(),
None => name.to_string(),
};
check_dup(&value, &mut errors);
check_order(&name.to_string(), &mut errors);
check_dup(symbol.name.span(), &value, &mut errors);
check_order(symbol.name.span(), &name.to_string(), &mut errors);

prefill_stream.extend(quote! {
#value,
});
@@ -142,7 +192,7 @@ pub fn symbols(input: TokenStream) -> TokenStream {
// Generate symbols for the strings "0", "1", ..., "9".
for n in 0..10 {
let n = n.to_string();
check_dup(&n, &mut errors);
check_dup(Span::call_site(), &n, &mut errors);
prefill_stream.extend(quote! {
#n,
});
@@ -152,14 +202,7 @@ pub fn symbols(input: TokenStream) -> TokenStream {
counter += 1;
}

if !errors.is_empty() {
for error in errors.into_iter() {
eprintln!("error: {}", error)
}
panic!("errors in `Keywords` and/or `Symbols`");
}

let tt = TokenStream::from(quote! {
let output = quote! {
macro_rules! keywords {
() => {
#keyword_stream
@@ -184,11 +227,16 @@ pub fn symbols(input: TokenStream) -> TokenStream {
])
}
}
});
};

// To see the generated code, uncomment this line, recompile, and
// run the resulting output through `rustfmt`.
//eprintln!("{}", tt);
(output, errors.list)

tt
// To see the generated code, use the "cargo expand" command.
// Do this once to install:
// cargo install cargo-expand
//
// Then, cd to rustc_span and run:
// cargo expand > /tmp/rustc_span_expanded.rs
//
// and read that file.
}
102 changes: 102 additions & 0 deletions compiler/rustc_macros/src/symbols/tests.rs
@@ -0,0 +1,102 @@
use super::*;

// This test is mainly here for interactive development. Use this test while
// you're working on the proc-macro defined in this file.
#[test]
fn test_symbols() {
// We textually include the symbol.rs file, which contains the list of all
// symbols, keywords, and common words. Then we search for the
// `symbols! { ... }` call.

static SYMBOL_RS_FILE: &str = include_str!("../../../rustc_span/src/symbol.rs");

let file = syn::parse_file(SYMBOL_RS_FILE).unwrap();
let symbols_path: syn::Path = syn::parse_quote!(symbols);

let m: &syn::ItemMacro = file
.items
.iter()
.filter_map(|i| {
if let syn::Item::Macro(m) = i {
if m.mac.path == symbols_path { Some(m) } else { None }
} else {
None
}
})
.next()
.expect("did not find `symbols!` macro invocation.");

let body_tokens = m.mac.tokens.clone();

test_symbols_macro(body_tokens, &[]);
}

fn test_symbols_macro(input: TokenStream, expected_errors: &[&str]) {
let (output, found_errors) = symbols_with_errors(input);

// It should always parse.
let _parsed_file = syn::parse2::<syn::File>(output).unwrap();

assert_eq!(
found_errors.len(),
expected_errors.len(),
"Macro generated a different number of errors than expected"
);

for (found_error, &expected_error) in found_errors.iter().zip(expected_errors.iter()) {
let found_error_str = format!("{}", found_error);
assert_eq!(found_error_str, expected_error);
}
}

#[test]
fn check_dup_keywords() {
let input = quote! {
Keywords {
Crate: "crate",
Crate: "crate",
}
Symbols {}
};
test_symbols_macro(input, &["Symbol `crate` is duplicated", "location of previous definition"]);
}

#[test]
fn check_dup_symbol() {
let input = quote! {
Keywords {}
Symbols {
splat,
splat,
}
};
test_symbols_macro(input, &["Symbol `splat` is duplicated", "location of previous definition"]);
}

#[test]
fn check_dup_symbol_and_keyword() {
let input = quote! {
Keywords {
Splat: "splat",
}
Symbols {
splat,
}
};
test_symbols_macro(input, &["Symbol `splat` is duplicated", "location of previous definition"]);
}

#[test]
fn check_symbol_order() {
let input = quote! {
Keywords {}
Symbols {
zebra,
aardvark,
}
};
test_symbols_macro(
input,
&["Symbol `aardvark` must precede `zebra`", "location of previous symbol `zebra`"],
);
}
