diff --git a/Cargo.toml b/Cargo.toml index bd8b8daf5..9777f609a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,10 +3,12 @@ members = [ "askama", "askama_actix", "askama_axum", - "askama_gotham", "askama_derive", "askama_escape", + "askama_fmt", + "askama_gotham", "askama_mendes", + "askama_parser", "askama_rocket", "askama_tide", "askama_warp", diff --git a/askama_derive/Cargo.toml b/askama_derive/Cargo.toml index 1d176b710..b928d512f 100644 --- a/askama_derive/Cargo.toml +++ b/askama_derive/Cargo.toml @@ -13,7 +13,7 @@ edition = "2018" proc-macro = true [features] -config = ["serde", "toml"] +config = ["serde", "toml", "askama_parser/config"] humansize = [] markdown = [] urlencode = [] @@ -30,6 +30,7 @@ with-tide = [] with-warp = [] [dependencies] +askama_parser = { version = "0.1.0", path = "../askama_parser" } mime = "0.3" mime_guess = "2" nom = "7" diff --git a/askama_derive/src/generator.rs b/askama_derive/src/generator.rs index 08c1de168..6b7f5648c 100644 --- a/askama_derive/src/generator.rs +++ b/askama_derive/src/generator.rs @@ -1,14 +1,15 @@ -use crate::config::{get_template_source, read_config_file, Config, WhitespaceHandling}; +use crate::config::{get_template_source, Config, WhitespaceHandling}; use crate::heritage::{Context, Heritage}; use crate::input::{Print, Source, TemplateInput}; use crate::parser::{parse, Cond, CondTest, Expr, Loop, Node, Target, When, Whitespace, Ws}; use crate::CompileError; +use askama_parser::generator::{find_used_templates, TemplateArgs}; use proc_macro::TokenStream; -use quote::{quote, ToTokens}; +use quote::quote; use std::collections::hash_map::{Entry, HashMap}; -use std::path::{Path, PathBuf}; +use std::path::Path; use std::{cmp, hash, mem, str}; /// The actual implementation for askama_derive::Template @@ -16,7 +17,7 @@ pub(crate) fn derive_template(input: TokenStream) -> TokenStream { let ast: syn::DeriveInput = syn::parse(input).unwrap(); match build_template(&ast) { Ok(source) => source.parse().unwrap(), - Err(e) => 
e.into_compile_error(), + Err(e) => e.into_compile_error().into(), } } @@ -29,8 +30,7 @@ pub(crate) fn derive_template(input: TokenStream) -> TokenStream { /// value as passed to the `template()` attribute. fn build_template(ast: &syn::DeriveInput) -> Result { let template_args = TemplateArgs::new(ast)?; - let config_toml = read_config_file(template_args.config_path.as_deref())?; - let config = Config::new(&config_toml)?; + let config = Config::from_file(template_args.config_path.as_deref())?; let input = TemplateInput::new(ast, &config, template_args)?; let source: String = match input.source { Source::Source(ref s) => s.clone(), @@ -66,7 +66,7 @@ fn build_template(ast: &syn::DeriveInput) -> Result { &contexts, heritage.as_ref(), MapChain::new(), - config.whitespace, + config.whitespace(), ) .build(&contexts[input.path.as_path()])?; if input.print == Print::Code || input.print == Print::All { @@ -75,161 +75,6 @@ fn build_template(ast: &syn::DeriveInput) -> Result { Ok(code) } -#[derive(Default)] -pub(crate) struct TemplateArgs { - pub(crate) source: Option, - pub(crate) print: Print, - pub(crate) escaping: Option, - pub(crate) ext: Option, - pub(crate) syntax: Option, - pub(crate) config_path: Option, -} - -impl TemplateArgs { - fn new(ast: &'_ syn::DeriveInput) -> Result { - // Check that an attribute called `template()` exists once and that it is - // the proper type (list). - let mut template_args = None; - for attr in &ast.attrs { - let ident = match attr.path.get_ident() { - Some(ident) => ident, - None => continue, - }; - - if ident == "template" { - if template_args.is_some() { - return Err("duplicated 'template' attribute".into()); - } - - match attr.parse_meta() { - Ok(syn::Meta::List(syn::MetaList { nested, .. 
})) => { - template_args = Some(nested); - } - Ok(_) => return Err("'template' attribute must be a list".into()), - Err(e) => return Err(format!("unable to parse attribute: {}", e).into()), - } - } - } - let template_args = - template_args.ok_or_else(|| CompileError::from("no attribute 'template' found"))?; - - let mut args = Self::default(); - // Loop over the meta attributes and find everything that we - // understand. Return a CompileError if something is not right. - // `source` contains an enum that can represent `path` or `source`. - for item in template_args { - let pair = match item { - syn::NestedMeta::Meta(syn::Meta::NameValue(ref pair)) => pair, - _ => { - return Err(format!( - "unsupported attribute argument {:?}", - item.to_token_stream() - ) - .into()) - } - }; - let ident = match pair.path.get_ident() { - Some(ident) => ident, - None => unreachable!("not possible in syn::Meta::NameValue(…)"), - }; - - if ident == "path" { - if let syn::Lit::Str(ref s) = pair.lit { - if args.source.is_some() { - return Err("must specify 'source' or 'path', not both".into()); - } - args.source = Some(Source::Path(s.value())); - } else { - return Err("template path must be string literal".into()); - } - } else if ident == "source" { - if let syn::Lit::Str(ref s) = pair.lit { - if args.source.is_some() { - return Err("must specify 'source' or 'path', not both".into()); - } - args.source = Some(Source::Source(s.value())); - } else { - return Err("template source must be string literal".into()); - } - } else if ident == "print" { - if let syn::Lit::Str(ref s) = pair.lit { - args.print = s.value().parse()?; - } else { - return Err("print value must be string literal".into()); - } - } else if ident == "escape" { - if let syn::Lit::Str(ref s) = pair.lit { - args.escaping = Some(s.value()); - } else { - return Err("escape value must be string literal".into()); - } - } else if ident == "ext" { - if let syn::Lit::Str(ref s) = pair.lit { - args.ext = Some(s.value()); - } else { - 
return Err("ext value must be string literal".into()); - } - } else if ident == "syntax" { - if let syn::Lit::Str(ref s) = pair.lit { - args.syntax = Some(s.value()) - } else { - return Err("syntax value must be string literal".into()); - } - } else if ident == "config" { - if let syn::Lit::Str(ref s) = pair.lit { - args.config_path = Some(s.value()) - } else { - return Err("config value must be string literal".into()); - } - } else { - return Err(format!("unsupported attribute key {:?} found", ident).into()); - } - } - - Ok(args) - } -} - -fn find_used_templates( - input: &TemplateInput<'_>, - map: &mut HashMap, - source: String, -) -> Result<(), CompileError> { - let mut dependency_graph = Vec::new(); - let mut check = vec![(input.path.clone(), source)]; - while let Some((path, source)) = check.pop() { - for n in parse(&source, input.syntax)? { - match n { - Node::Extends(Expr::StrLit(extends)) => { - let extends = input.config.find_template(extends, Some(&path))?; - let dependency_path = (path.clone(), extends.clone()); - if dependency_graph.contains(&dependency_path) { - return Err(format!( - "cyclic dependecy in graph {:#?}", - dependency_graph - .iter() - .map(|e| format!("{:#?} --> {:#?}", e.0, e.1)) - .collect::>() - ) - .into()); - } - dependency_graph.push(dependency_path); - let source = get_template_source(&extends)?; - check.push((extends, source)); - } - Node::Import(_, import, _) => { - let import = input.config.find_template(import, Some(&path))?; - let source = get_template_source(&import)?; - check.push((import, source)); - } - _ => {} - } - } - map.insert(path, source); - } - Ok(()) -} - struct Generator<'a> { // The template input state: original struct AST and attributes input: &'a TemplateInput<'a>, @@ -597,7 +442,7 @@ impl<'a> Generator<'a> { Node::Lit(lws, val, rws) => { self.visit_lit(lws, val, rws); } - Node::Comment(ws) => { + Node::Comment(ws, _) => { self.write_comment(ws); } Node::Expr(ws, ref val) => { @@ -612,7 +457,7 @@ impl<'a> 
Generator<'a> { Node::Cond(ref conds, ws) => { self.write_cond(ctx, buf, conds, ws)?; } - Node::Match(ws1, ref expr, ref arms, ws2) => { + Node::Match(ws1, ref expr, _, ref arms, ws2) => { self.write_match(ctx, buf, ws1, expr, arms, ws2)?; } Node::Loop(ref loop_block) => { @@ -1425,9 +1270,7 @@ impl<'a> Generator<'a> { Some(name) => self .input .config - .escapers - .iter() - .find_map(|(escapers, escaper)| escapers.contains(name).then_some(escaper)) + .find_escaper(name) .ok_or_else(|| CompileError::from("invalid escaper for escape filter"))?, None => self.input.escaper, }; diff --git a/askama_derive/src/heritage.rs b/askama_derive/src/heritage.rs index 9556145f1..d74be3393 100644 --- a/askama_derive/src/heritage.rs +++ b/askama_derive/src/heritage.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; use crate::config::Config; -use crate::parser::{Expr, Loop, Macro, Node}; +use crate::parser::{Loop, Macro, Node}; use crate::CompileError; pub(crate) struct Heritage<'a> { @@ -44,7 +44,7 @@ pub(crate) struct Context<'a> { impl Context<'_> { pub(crate) fn new<'n>( - config: &Config<'_>, + config: &Config, path: &Path, nodes: &'n [Node<'n>], ) -> Result, CompileError> { @@ -58,7 +58,7 @@ impl Context<'_> { while let Some(nodes) = nested.pop() { for n in nodes { match n { - Node::Extends(Expr::StrLit(extends_path)) if top => match extends { + Node::Extends(extends_path) if top => match extends { Some(_) => return Err("multiple extend blocks found".into()), None => { extends = Some(config.find_template(extends_path, Some(path))?); @@ -93,7 +93,7 @@ impl Context<'_> { nested.push(body); nested.push(else_block); } - Node::Match(_, _, arms, _) => { + Node::Match(_, _, _, arms, _) => { for (_, _, arm) in arms { nested.push(arm); } diff --git a/askama_derive/src/lib.rs b/askama_derive/src/lib.rs index 2acf58380..61723f812 100644 --- a/askama_derive/src/lib.rs +++ b/askama_derive/src/lib.rs @@ -2,67 +2,18 @@ #![deny(elided_lifetimes_in_paths)] 
#![deny(unreachable_pub)] -use std::borrow::Cow; -use std::fmt; - use proc_macro::TokenStream; -use proc_macro2::Span; -mod config; +use askama_parser::{config, input, parser, CompileError}; + mod generator; mod heritage; -mod input; -mod parser; #[proc_macro_derive(Template, attributes(template))] pub fn derive_template(input: TokenStream) -> TokenStream { generator::derive_template(input) } -#[derive(Debug, Clone)] -struct CompileError { - msg: Cow<'static, str>, - span: Span, -} - -impl CompileError { - fn new>>(s: S, span: Span) -> Self { - Self { - msg: s.into(), - span, - } - } - - fn into_compile_error(self) -> TokenStream { - syn::Error::new(self.span, self.msg) - .to_compile_error() - .into() - } -} - -impl std::error::Error for CompileError {} - -impl fmt::Display for CompileError { - #[inline] - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt.write_str(&self.msg) - } -} - -impl From<&'static str> for CompileError { - #[inline] - fn from(s: &'static str) -> Self { - Self::new(s, Span::call_site()) - } -} - -impl From for CompileError { - #[inline] - fn from(s: String) -> Self { - Self::new(s, Span::call_site()) - } -} - // This is used by the code generator to decide whether a named filter is part of // Askama or should refer to a local `filters` module. It should contain all the // filters shipped with Askama, even the optional ones (since optional inclusion diff --git a/askama_fmt/Cargo.toml b/askama_fmt/Cargo.toml new file mode 100644 index 000000000..4f8ca8169 --- /dev/null +++ b/askama_fmt/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "askama_fmt" +version = "0.1.0" +description = "Formatter for Askama template syntax" +homepage = "https://github.com/djc/askama" +repository = "https://github.com/djc/askama" +license = "MIT/Apache-2.0" +workspace = ".." 
+readme = "README.md" +edition = "2018" + +[dependencies] +askama_parser = { version = "0.1.0", path = "../askama_parser", features = ["config"] } diff --git a/askama_fmt/src/lib.rs b/askama_fmt/src/lib.rs new file mode 100644 index 000000000..c0156b27d --- /dev/null +++ b/askama_fmt/src/lib.rs @@ -0,0 +1,917 @@ +use askama_parser::config::Syntax; +use askama_parser::parser::{Loop, Macro, Node, Whitespace, Ws}; + +pub fn ws_to_char(ws: &Whitespace) -> char { + match ws { + Whitespace::Preserve => '+', + Whitespace::Suppress => '-', + Whitespace::Minimize => '~', + } +} + +fn block_tag(buf: &mut String, syn: &Syntax, ws: &Ws, f: F) { + structured(buf, &syn.block_start, &syn.block_end, true, ws, f); +} + +fn structured( + buf: &mut String, + open: &str, + close: &str, + padding: bool, + ws: &Ws, + f: F, +) { + buf.push_str(open); + ws.0.iter().map(ws_to_char).for_each(|c| buf.push(c)); + if padding { + buf.push(' '); + } + f(buf); + if padding { + buf.push(' '); + } + ws.1.iter().map(ws_to_char).for_each(|c| buf.push(c)); + buf.push_str(close); +} + +pub fn fmt(ast: &[Node], syn: &Syntax) -> String { + let mut buf = String::new(); + + for node in ast { + match node { + Node::Lit(lws, val, rws) => { + buf.push_str(lws); + buf.push_str(val); + buf.push_str(rws); + } + Node::Comment(ws, text) => structured( + &mut buf, + &syn.comment_start, + &syn.comment_end, + false, + ws, + |buf| buf.push_str(text), + ), + Node::Expr(ws, expr) => { + structured(&mut buf, &syn.expr_start, &syn.expr_end, true, ws, |buf| { + expr_to_str(buf, expr) + }) + } + Node::Call(ws, scope, name, args) => block_tag(&mut buf, syn, ws, |buf| { + buf.push_str("call "); + if let Some(scope) = scope { + buf.push_str(scope); + buf.push_str("::"); + } + buf.push_str(name); + buf.push('('); + let mut first = true; + for arg in args { + if first { + first = false; + } else { + buf.push_str(", "); + } + + expr_to_str(buf, arg); + } + buf.push(')'); + }), + Node::LetDecl(ws, target) => block_tag(&mut buf, 
syn, ws, |buf| { + buf.push_str("let "); + target_to_str(buf, target); + }), + Node::Let(ws, target, expr) => block_tag(&mut buf, syn, ws, |buf| { + buf.push_str("let "); + target_to_str(buf, target); + buf.push_str(" = "); + expr_to_str(buf, expr); + }), + Node::Cond(blocks, ws) => { + let mut print_else = false; + for (bws, cond, block) in blocks { + block_tag(&mut buf, syn, bws, |buf| { + if print_else { + buf.push_str("else"); + } + if let Some(test) = cond { + if print_else { + buf.push(' '); + } + buf.push_str("if "); + if let Some(target) = &test.target { + buf.push_str("let "); + target_to_str(buf, target); + buf.push_str(" = "); + } + expr_to_str(buf, &test.expr); + } + if !print_else { + print_else = true; + } + }); + + buf.push_str(&fmt(block, syn)); + } + block_tag(&mut buf, syn, ws, |buf| buf.push_str("endif")); + } + Node::Match(lws, expr, interstitial, blocks, rws) => { + block_tag(&mut buf, syn, lws, |buf| { + buf.push_str("match "); + expr_to_str(buf, expr); + }); + + buf.push_str(&fmt(interstitial, syn)); + + for (ws, target, block) in blocks { + block_tag(&mut buf, syn, ws, |buf| { + buf.push_str("when "); + target_to_str(buf, target); + }); + buf.push_str(&fmt(block, syn)); + } + + block_tag(&mut buf, syn, rws, |buf| buf.push_str("endmatch")); + } + Node::Loop(Loop { + ws1, + var, + iter, + cond, + body, + ws2, + else_block, + ws3, + }) => { + block_tag(&mut buf, syn, ws1, |buf| { + buf.push_str("for "); + target_to_str(buf, var); + buf.push_str(" in "); + expr_to_str(buf, iter); + + if let Some(cond) = cond { + buf.push_str(" if "); + expr_to_str(buf, cond); + } + }); + + buf.push_str(&fmt(body, syn)); + + if !else_block.is_empty() { + block_tag(&mut buf, syn, ws2, |buf| buf.push_str("else")); + + buf.push_str(&fmt(else_block, syn)); + } + + block_tag(&mut buf, syn, ws3, |buf| buf.push_str("endfor")); + } + Node::Extends(parent) => { + let ws = &Ws(None, None); + block_tag(&mut buf, syn, ws, |buf| { + buf.push_str("extends "); + 
strlit_to_str(buf, parent); + }); + } + Node::BlockDef(lws, name, body, rws) => { + block_tag(&mut buf, syn, lws, |buf| { + buf.push_str("block "); + buf.push_str(name); + }); + buf.push_str(&fmt(body, syn)); + block_tag(&mut buf, syn, rws, |buf| { + buf.push_str("endblock"); + }); + } + Node::Include(ws, name) => { + block_tag(&mut buf, syn, ws, |buf| { + buf.push_str("include "); + strlit_to_str(buf, name); + }); + } + Node::Import(ws, name, alias) => { + block_tag(&mut buf, syn, ws, |buf| { + buf.push_str("import "); + strlit_to_str(buf, name); + buf.push_str(" as "); + buf.push_str(alias); + }); + } + Node::Macro( + name, + Macro { + ws1, + args, + nodes, + ws2, + }, + ) => { + block_tag(&mut buf, syn, ws1, |buf| { + buf.push_str("macro "); + buf.push_str(name); + buf.push('('); + let mut first = true; + for arg in args { + if first { + first = false; + } else { + buf.push_str(", "); + } + buf.push_str(arg); + } + buf.push(')'); + }); + buf.push_str(&fmt(nodes, syn)); + block_tag(&mut buf, syn, ws2, |buf| { + buf.push_str("endmacro"); + }); + } + Node::Raw(ws1, lws, val, rws, ws2) => { + block_tag(&mut buf, syn, ws1, |buf| buf.push_str("raw")); + buf.push_str(lws); + buf.push_str(val); + buf.push_str(rws); + block_tag(&mut buf, syn, ws2, |buf| buf.push_str("endraw")); + } + Node::Break(ws) => { + block_tag(&mut buf, syn, ws, |buf| buf.push_str("break")); + } + Node::Continue(ws) => { + block_tag(&mut buf, syn, ws, |buf| buf.push_str("continue")); + } + } + } + + buf +} + +fn target_to_str(buf: &mut String, target: &askama_parser::parser::Target) { + use askama_parser::parser::Target::*; + match target { + Name(name) => buf.push_str(name), + Tuple(path, elements) => { + if !path.is_empty() { + buf.push_str(&path.join("::")); + buf.push_str(" with "); + } + + buf.push('('); + + let mut print_comma = false; + for element in elements { + if print_comma { + buf.push_str(", "); + } else { + print_comma = true; + } + + target_to_str(buf, element); + } + + if 
elements.len() == 1 && path.is_empty() { + buf.push(','); + } + + buf.push(')'); + } + Struct(path, fields) => { + buf.push_str(&path.join("::")); + buf.push_str(" with { "); + let mut first = true; + for field in fields { + if first { + first = false; + } else { + buf.push_str(", "); + } + buf.push_str(field.0); + if let askama_parser::parser::Target::Name(n) = field.1 { + if n != field.0 { + buf.push_str(": "); + target_to_str(buf, &field.1); + } + } + } + buf.push_str(" }"); + } + NumLit(val) => { + buf.push_str(val); + } + StrLit(val) => { + buf.push('"'); + buf.push_str(val); + buf.push('"'); + } + CharLit(val) => { + buf.push('\''); + buf.push_str(val); + buf.push('\''); + } + BoolLit(val) => { + buf.push_str(val); + } + Path(path) => buf.push_str(&path.join("::")), + } +} + +fn expr_to_str(buf: &mut String, expr: &askama_parser::parser::Expr) { + use askama_parser::parser::Expr::*; + match expr { + BoolLit(s) | NumLit(s) | Var(s) => buf.push_str(s), + StrLit(s) => { + strlit_to_str(buf, s); + } + CharLit(s) => { + buf.push_str("'"); + buf.push_str(s); + buf.push_str("'"); + } + Path(ss) => { + buf.push_str(&ss.join("::")); + } + Array(exprs) => { + buf.push('['); + let mut first = true; + for el in exprs { + if first { + first = false; + } else { + buf.push_str(", "); + } + expr_to_str(buf, el); + } + buf.push(']'); + } + Attr(expr, field) => { + expr_to_str(buf, expr); + buf.push('.'); + buf.push_str(field); + } + Index(expr, idx) => { + expr_to_str(buf, expr); + buf.push('['); + expr_to_str(buf, idx); + buf.push(']'); + } + Filter(name, args) => { + assert!(!args.is_empty()); + expr_to_str(buf, &args[0]); + buf.push('|'); + buf.push_str(name); + if args.len() > 1 { + buf.push('('); + let mut first = true; + for arg in args.iter().skip(1) { + if first { + first = false; + } else { + buf.push_str(", "); + } + expr_to_str(buf, arg); + } + buf.push(')'); + } + } + Unary(op, arg) => { + buf.push_str(op); + expr_to_str(buf, arg); + } + BinOp(op, lhs, rhs) => { + 
expr_to_str(buf, lhs); + buf.push(' '); + buf.push_str(op); + buf.push(' '); + expr_to_str(buf, rhs); + } + Range(op, lhs, rhs) => { + if let Some(lhs) = lhs { + expr_to_str(buf, lhs); + } + buf.push_str(op); + if let Some(rhs) = rhs { + expr_to_str(buf, rhs); + } + } + Group(expr) => { + buf.push('('); + expr_to_str(buf, expr); + buf.push(')'); + } + Tuple(els) => { + buf.push('('); + let mut first = true; + for el in els { + if first { + first = false; + } else { + buf.push_str(", "); + } + expr_to_str(buf, el); + } + + if els.len() == 1 { + buf.push(','); + } + + buf.push(')'); + } + Call(callee, args) => { + expr_to_str(buf, callee); + buf.push('('); + let mut first = true; + for arg in args { + if first { + first = false; + } else { + buf.push_str(", "); + } + expr_to_str(buf, arg); + } + buf.push(')'); + } + RustMacro(name, input) => { + buf.push_str(name); + buf.push('!'); + buf.push('('); + buf.push_str(input); + buf.push(')'); + } + Try(expr) => { + expr_to_str(buf, expr); + buf.push('?'); + } + } +} + +fn strlit_to_str(buf: &mut String, s: &str) { + buf.push_str("\""); + buf.push_str(s); + buf.push_str("\""); +} + +#[cfg(test)] +mod tests { + use super::*; + + use askama_parser::config::Syntax; + use askama_parser::parser::{parse, Expr, Target, Ws}; + + fn custom() -> Syntax { + Syntax { + block_start: "".into(), + comment_start: "".into(), + expr_start: "<:".into(), + expr_end: ":>".into(), + } + } + + #[test] + fn lit() { + let syn = Syntax::default(); + let node = parse(" foobar\t", &syn).expect("PARSE"); + + assert_eq!(" foobar\t", fmt(&node, &syn)); + } + + #[test] + fn comment() { + let syn = Syntax::default(); + let node = parse("foo{#+ empty -#}bar", &syn).expect("PARSE"); + + assert_eq!("foo{#+ empty -#}bar", fmt(&node, &syn)); + assert_eq!("foobar", fmt(&node, &custom())); + } + + #[test] + fn expr() { + let syn = Syntax::default(); + let node = parse("{{42}}", &syn).expect("PARSE"); + + assert_eq!("{{ 42 }}", fmt(&node, &syn)); + assert_eq!("<: 
42 :>", fmt(&node, &custom())); + } + + fn test_target(expected: &str, target: Target) { + let syn = Syntax::default(); + let node = Node::Let(Ws(None, None), target, Expr::Var("val")); + + let str1 = fmt(&[node], &syn); + assert_eq!(str1, format!("{{% let {} = val %}}", expected)); + + let parsed = parse(&str1, &syn).expect("PARSE"); + let str2 = fmt(&parsed, &syn); + assert_eq!(str1, str2); + } + + #[test] + fn target_name() { + test_target("foo", Target::Name("foo")); + } + #[test] + fn target_tuple_unit() { + test_target("()", Target::Tuple(vec![], vec![])); + } + #[test] + fn target_tuple_anon() { + test_target("(a,)", Target::Tuple(vec![], vec![Target::Name("a")])); + } + #[test] + fn target_tuple_named() { + test_target( + "Some with (val)", + Target::Tuple(vec!["Some"], vec![Target::Name("val")]), + ); + } + #[test] + fn target_struct() { + test_target( + "Color with { r, g: lime, b }", + Target::Struct( + vec!["Color"], + vec![ + ("r", Target::Name("r")), + ("g", Target::Name("lime")), + ("b", Target::Name("b")), + ], + ), + ); + } + #[test] + fn target_numlit() { + test_target("42", Target::NumLit("42")); + } + #[test] + fn target_strlit() { + test_target("\"foo\\\"bar\"", Target::StrLit("foo\\\"bar")); + } + #[test] + fn target_charlit() { + test_target("'.'", Target::CharLit(".")); + } + #[test] + fn target_boollit() { + test_target("false", Target::BoolLit("false")); + } + #[test] + fn target_path() { + test_target("foo::bar", Target::Path(vec!["foo", "bar"])); + } + + fn test_expr(expected: &str, expr: Expr) { + let syn = Syntax::default(); + let node = Node::Expr(Ws(None, None), expr); + + let str1 = fmt(&[node], &syn); + assert_eq!(str1, format!("{{{{ {} }}}}", expected)); + + let parsed = parse(&str1, &syn).expect("PARSE"); + let str2 = fmt(&parsed, &syn); + assert_eq!(str1, str2); + } + + #[test] + fn expr_bool_lit() { + test_expr("true", Expr::BoolLit("true")); + } + #[test] + fn expr_num_lit() { + test_expr("42", Expr::NumLit("42")); + } + 
#[test] + fn expr_str_lit() { + test_expr("\"foo\\\"bar\"", Expr::StrLit("foo\\\"bar")); + } + #[test] + fn expr_char_lit() { + test_expr("'c'", Expr::CharLit("c")); + } + #[test] + fn expr_var() { + test_expr("value", Expr::Var("value")); + } + #[test] + fn expr_path() { + test_expr("askama::Template", Expr::Path(vec!["askama", "Template"])); + } + #[test] + fn expr_array() { + test_expr( + "[1, 2]", + Expr::Array(vec![Expr::NumLit("1"), Expr::NumLit("2")]), + ); + } + #[test] + fn expr_attr() { + test_expr("obj.field", Expr::Attr(Box::new(Expr::Var("obj")), "field")); + } + #[test] + fn expr_index() { + test_expr( + "arr[idx]", + Expr::Index(Box::new(Expr::Var("arr")), Box::new(Expr::Var("idx"))), + ); + } + #[test] + fn expr_filter() { + test_expr( + "input|filter(\"arg\")", + Expr::Filter("filter", vec![Expr::Var("input"), Expr::StrLit("arg")]), + ); + } + #[test] + fn expr_unary() { + test_expr("-42", Expr::Unary("-", Box::new(Expr::NumLit("42")))); + } + #[test] + fn expr_binop() { + test_expr( + "1 + 2", + Expr::BinOp( + "+", + Box::new(Expr::NumLit("1")), + Box::new(Expr::NumLit("2")), + ), + ); + } + #[test] + fn expr_range_oo() { + test_expr("..", Expr::Range("..", None, None)); + } + #[test] + fn expr_range_co() { + test_expr( + "1..", + Expr::Range("..", Some(Box::new(Expr::NumLit("1"))), None), + ); + } + #[test] + fn expr_range_oc() { + test_expr( + "..1", + Expr::Range("..", None, Some(Box::new(Expr::NumLit("1")))), + ); + } + #[test] + fn expr_range_right() { + test_expr( + "..=1", + Expr::Range("..=", None, Some(Box::new(Expr::NumLit("1")))), + ); + } + #[test] + fn expr_group() { + test_expr("(var)", Expr::Group(Box::new(Expr::Var("var")))); + } + #[test] + fn expr_tuple_one() { + test_expr("(var,)", Expr::Tuple(vec![Expr::Var("var")])); + } + #[test] + fn expr_tuple_two() { + test_expr("(a, b)", Expr::Tuple(vec![Expr::Var("a"), Expr::Var("b")])); + } + #[test] + fn expr_call() { + test_expr( + "foo(bar, baz)", + Expr::Call( + 
Box::new(Expr::Var("foo")), + vec![Expr::Var("bar"), Expr::Var("baz")], + ), + ); + } + #[test] + fn rust_macro() { + test_expr("do!(+#15 I$ 4@3)", Expr::RustMacro("do", "+#15 I$ 4@3")); + } + #[test] + fn try_() { + test_expr("maybe?", Expr::Try(Box::new(Expr::Var("maybe")))); + } + + #[test] + fn call() { + let syn = Syntax::default(); + let node = parse("{% call scope::macro(1, 2, 3) %}", &syn).expect("PARSE"); + + assert_eq!("{% call scope::macro(1, 2, 3) %}", fmt(&node, &syn)); + assert_eq!("", fmt(&node, &custom())); + } + + #[test] + fn let_decl() { + let syn = Syntax::default(); + let node = parse("{%let foo\t%}", &syn).expect("PARSE"); + + assert_eq!("{% let foo %}", fmt(&node, &syn)); + assert_eq!("", fmt(&node, &custom())); + } + + #[test] + fn let_() { + let syn = Syntax::default(); + let node = parse("{%let foo\t=\n42%}", &syn).expect("PARSE"); + + assert_eq!("{% let foo = 42 %}", fmt(&node, &syn)); + assert_eq!("", fmt(&node, &custom())); + } + + #[test] + fn cond() { + let syn = Syntax::default(); + let node = parse("{%if foo-%}bar{%-else\t-%}baz{%- endif\n%}", &syn).expect("PARSE"); + + assert_eq!( + "{% if foo -%}bar{%- else -%}baz{%- endif %}", + fmt(&node, &syn) + ); + assert_eq!( + "barbaz", + fmt(&node, &custom()) + ); + } + + #[test] + fn match_() { + let syn = Syntax::default(); + let node = parse( + "{%match item-%} + {% when Some + with\t (\t \"foo\" )\t-%} + Found literal foo + {% when Some with (val) -%} + Found {{ val }} + {% when None -%} +{% endmatch\n%}", + &syn, + ) + .expect("PARSE"); + + assert_eq!( + "{% match item -%} + {% when Some with (\"foo\") -%} + Found literal foo + {% when Some with (val) -%} + Found {{ val }} + {% when None -%} +{% endmatch %}", + fmt(&node, &syn) + ); + assert_eq!( + " + + Found literal foo + + Found <: val :> + +", + fmt(&node, &custom()) + ); + } + + #[test] + fn loop_() { + let syn = Syntax::default(); + let node = parse("{%for value in values-%}{{\tvalue\n}}{%endfor~%}", &syn).expect("PARSE"); + + 
assert_eq!( + "{% for value in values -%}{{ value }}{% endfor ~%}", + fmt(&node, &syn) + ); + assert_eq!( + "<: value :>", + fmt(&node, &custom()) + ); + } + + #[test] + fn loop_cond() { + let syn = Syntax::default(); + let node = parse( + "{%for value in values if true-%}{{\tvalue\n}}{%endfor~%}", + &syn, + ) + .expect("PARSE"); + + assert_eq!( + "{% for value in values if true -%}{{ value }}{% endfor ~%}", + fmt(&node, &syn) + ); + assert_eq!( + "<: value :>", + fmt(&node, &custom()) + ); + } + + #[test] + fn loop_else() { + let syn = Syntax::default(); + let node = parse( + "{%for value in values-%}{{\tvalue\n}}{%else%}NONE{%endfor~%}", + &syn, + ) + .expect("PARSE"); + + assert_eq!( + "{% for value in values -%}{{ value }}{% else %}NONE{% endfor ~%}", + fmt(&node, &syn) + ); + assert_eq!( + "<: value :>NONE", + fmt(&node, &custom()) + ); + } + + #[test] + fn extends() { + let syn = Syntax::default(); + let node = parse("{%extends \"base.html\"\t%}", &syn).expect("PARSE"); + + assert_eq!("{% extends \"base.html\" %}", fmt(&node, &syn)); + assert_eq!("", fmt(&node, &custom())); + } + + #[test] + fn block_def() { + let syn = Syntax::default(); + let node = parse("{%block title\t%}Hi!{%endblock%}", &syn).expect("PARSE"); + + assert_eq!("{% block title %}Hi!{% endblock %}", fmt(&node, &syn)); + assert_eq!("Hi!", fmt(&node, &custom())); + } + + #[test] + fn include() { + let syn = Syntax::default(); + let node = parse("{%include \"item.html\"\t%}", &syn).expect("PARSE"); + + assert_eq!("{% include \"item.html\" %}", fmt(&node, &syn)); + assert_eq!("", fmt(&node, &custom())); + } + + #[test] + fn import() { + let syn = Syntax::default(); + let node = parse("{%import \"macros.html\" as mod\t%}", &syn).expect("PARSE"); + + assert_eq!("{% import \"macros.html\" as mod %}", fmt(&node, &syn)); + assert_eq!("", fmt(&node, &custom())); + } + + #[test] + fn macro_() { + let syn = Syntax::default(); + let node = + parse("{%macro heading(arg)\t%}

{{arg}}

{%endmacro%}", &syn).expect("PARSE"); + + assert_eq!( + "{% macro heading(arg) %}

{{ arg }}

{% endmacro %}", + fmt(&node, &syn) + ); + assert_eq!( + "

<: arg :>

", + fmt(&node, &custom()) + ); + } + + #[test] + fn raw() { + let syn = Syntax::default(); + let node = parse("{%raw\t%}\n{{\twhat}}{%endraw%}", &syn).expect("PARSE"); + + assert_eq!("{% raw %}\n{{\twhat}}{% endraw %}", fmt(&node, &syn)); + assert_eq!("\n{{\twhat}}", fmt(&node, &custom())); + } + + #[test] + fn break_() { + let syn = Syntax::default(); + let node = parse("{%for value in values-%}{%\tbreak\n%}{%endfor~%}", &syn).expect("PARSE"); + + assert_eq!( + "{% for value in values -%}{% break %}{% endfor ~%}", + fmt(&node, &syn) + ); + assert_eq!( + "", + fmt(&node, &custom()) + ); + } + + #[test] + fn continue_() { + let syn = Syntax::default(); + let node = + parse("{%for value in values-%}{%\tcontinue\n%}{%endfor~%}", &syn).expect("PARSE"); + + assert_eq!( + "{% for value in values -%}{% continue %}{% endfor ~%}", + fmt(&node, &syn) + ); + assert_eq!( + "", + fmt(&node, &custom()) + ); + } +} diff --git a/askama_parser/Cargo.toml b/askama_parser/Cargo.toml new file mode 100644 index 000000000..90f2e1d75 --- /dev/null +++ b/askama_parser/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "askama_parser" +version = "0.1.0" +description = "Askama template syntax parser" +homepage = "https://github.com/djc/askama" +repository = "https://github.com/djc/askama" +license = "MIT/Apache-2.0" +workspace = ".." 
+readme = "README.md" +edition = "2018" + +[features] +config = ["serde", "toml"] + +[dependencies] +mime = "0.3" +mime_guess = "2" +nom = "7" +proc-macro2 = "1" +quote = "1" +serde = { version = "1.0", optional = true, features = ["derive"] } +syn = "1" +toml = { version = "0.5", optional = true } diff --git a/askama_parser/LICENSE-APACHE b/askama_parser/LICENSE-APACHE new file mode 120000 index 000000000..965b606f3 --- /dev/null +++ b/askama_parser/LICENSE-APACHE @@ -0,0 +1 @@ +../LICENSE-APACHE \ No newline at end of file diff --git a/askama_parser/LICENSE-MIT b/askama_parser/LICENSE-MIT new file mode 120000 index 000000000..76219eb72 --- /dev/null +++ b/askama_parser/LICENSE-MIT @@ -0,0 +1 @@ +../LICENSE-MIT \ No newline at end of file diff --git a/askama_parser/README.md b/askama_parser/README.md new file mode 100644 index 000000000..e27f1107d --- /dev/null +++ b/askama_parser/README.md @@ -0,0 +1,9 @@ +# askama_derive: procedural macros for the Askama templating engine + +[![Documentation](https://docs.rs/askama_derive/badge.svg)](https://docs.rs/askama_derive/) +[![Latest version](https://img.shields.io/crates/v/askama_derive.svg)](https://crates.io/crates/askama_derive) +[![Build Status](https://github.com/djc/askama/workflows/CI/badge.svg)](https://github.com/djc/askama/actions?query=workflow%3ACI) +[![Chat](https://badges.gitter.im/gitterHQ/gitter.svg)](https://gitter.im/djc/askama) + +This crate contains the procedural macros used by the +[Askama](https://github.com/djc/askama) templating engine. diff --git a/askama_derive/src/config.rs b/askama_parser/src/config.rs similarity index 77% rename from askama_derive/src/config.rs rename to askama_parser/src/config.rs index 14d4323e6..1df85aac3 100644 --- a/askama_derive/src/config.rs +++ b/askama_parser/src/config.rs @@ -1,3 +1,16 @@ +//! Askama parser configuration. +//! +//! This module handles the configuration format for Askama. +//! Load a `Config` object by calling `from_file`, pass `None` +//! 
to load the project's default `askama.toml`. +//! +//! ```no_run +//! use askama_parser::config::Config; +//! +//! let default_config = Config::from_file(None) +//! .expect("load config"); +//! ``` + use std::collections::{BTreeMap, HashSet}; use std::convert::TryFrom; use std::path::{Path, PathBuf}; @@ -8,17 +21,30 @@ use serde::Deserialize; use crate::CompileError; +/// Askama parser configuration. #[derive(Debug)] -pub(crate) struct Config<'a> { - pub(crate) dirs: Vec, - pub(crate) syntaxes: BTreeMap>, - pub(crate) default_syntax: &'a str, +pub struct Config { + dirs: Vec, + pub(crate) syntaxes: BTreeMap, + pub(crate) default_syntax: String, pub(crate) escapers: Vec<(HashSet, String)>, - pub(crate) whitespace: WhitespaceHandling, + whitespace: WhitespaceHandling, } -impl Config<'_> { - pub(crate) fn new(s: &str) -> std::result::Result, CompileError> { +impl Config { + /// Load Askama configuration from the project's config file. + /// + /// This will try to load TOML file with Askama configuration + /// for the dependent project. The config file is relative + /// to `CARGO_MANIFEST_DIR`. If a filename is not provided, + /// it defaults to `askama.toml`. + pub fn from_file(file: Option<&str>) -> std::result::Result { + let config_toml = read_config_file(file)?; + Config::from_toml(&config_toml) + } + + /// Load Askama configuration from TOML source. + pub fn from_toml(s: &str) -> std::result::Result { let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); let default_dirs = vec![root.join("templates")]; @@ -87,13 +113,14 @@ impl Config<'_> { Ok(Config { dirs, syntaxes, - default_syntax, + default_syntax: default_syntax.into(), escapers, whitespace, }) } - pub(crate) fn find_template( + /// Find a template file based on this configuration. + pub fn find_template( &self, path: &str, start_at: Option<&Path>, @@ -118,43 +145,62 @@ impl Config<'_> { ) .into()) } + + /// Find the escaper to use for the given content type. 
+ pub fn find_escaper(&self, name: &str) -> Option<&str> { + self.escapers + .iter() + .find_map(|(escapers, escaper)| escapers.contains(name).then_some(escaper.as_ref())) + } + + /// The whitespace handling to use. + pub fn whitespace(&self) -> WhitespaceHandling { + self.whitespace + } } +/// The definition of a custom template syntax. #[derive(Debug)] -pub(crate) struct Syntax<'a> { - pub(crate) block_start: &'a str, - pub(crate) block_end: &'a str, - pub(crate) expr_start: &'a str, - pub(crate) expr_end: &'a str, - pub(crate) comment_start: &'a str, - pub(crate) comment_end: &'a str, +pub struct Syntax { + /// Defaults to `"{%"`. + pub block_start: String, + /// Defaults to `"%}"`. + pub block_end: String, + /// Defaults to `"{{"`. + pub expr_start: String, + /// Defaults to `"}}"`. + pub expr_end: String, + /// Defaults to `"{#"`. + pub comment_start: String, + /// Defaults to `"#}"`. + pub comment_end: String, } -impl Default for Syntax<'_> { +impl Default for Syntax { fn default() -> Self { Self { - block_start: "{%", - block_end: "%}", - expr_start: "{{", - expr_end: "}}", - comment_start: "{#", - comment_end: "#}", + block_start: "{%".into(), + block_end: "%}".into(), + expr_start: "{{".into(), + expr_end: "}}".into(), + comment_start: "{#".into(), + comment_end: "#}".into(), } } } -impl<'a> TryFrom> for Syntax<'a> { +impl<'a> TryFrom> for Syntax { type Error = CompileError; fn try_from(raw: RawSyntax<'a>) -> std::result::Result { let default = Self::default(); let syntax = Self { - block_start: raw.block_start.unwrap_or(default.block_start), - block_end: raw.block_end.unwrap_or(default.block_end), - expr_start: raw.expr_start.unwrap_or(default.expr_start), - expr_end: raw.expr_end.unwrap_or(default.expr_end), - comment_start: raw.comment_start.unwrap_or(default.comment_start), - comment_end: raw.comment_end.unwrap_or(default.comment_end), + block_start: raw.block_start.map(ToString::to_string).unwrap_or(default.block_start), + block_end: 
raw.block_end.map(ToString::to_string).unwrap_or(default.block_end), + expr_start: raw.expr_start.map(ToString::to_string).unwrap_or(default.expr_start), + expr_end: raw.expr_end.map(ToString::to_string).unwrap_or(default.expr_end), + comment_start: raw.comment_start.map(ToString::to_string).unwrap_or(default.comment_start), + comment_end: raw.comment_end.map(ToString::to_string).unwrap_or(default.comment_end), }; if syntax.block_start.len() != 2 @@ -202,10 +248,11 @@ impl RawConfig<'_> { } } +/// How should we handle whitespace in the template? #[derive(Clone, Copy, PartialEq, Eq, Debug)] #[cfg_attr(feature = "serde", derive(Deserialize))] #[cfg_attr(feature = "serde", serde(field_identifier, rename_all = "lowercase"))] -pub(crate) enum WhitespaceHandling { +pub enum WhitespaceHandling { /// The default behaviour. It will leave the whitespace characters "as is". Preserve, /// It'll remove all the whitespace characters before and after the jinja block. @@ -248,9 +295,7 @@ struct RawEscaper<'a> { extensions: Vec<&'a str>, } -pub(crate) fn read_config_file( - config_path: Option<&str>, -) -> std::result::Result { +fn read_config_file(config_path: Option<&str>) -> std::result::Result { let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); let filename = match config_path { Some(config_path) => root.join(config_path), @@ -274,8 +319,9 @@ where vals.iter().map(|s| s.to_string()).collect() } +/// Load a template file to a string. 
#[allow(clippy::match_wild_err_arm)] -pub(crate) fn get_template_source(tpl_path: &Path) -> std::result::Result { +pub fn get_template_source(tpl_path: &Path) -> std::result::Result { match fs::read_to_string(tpl_path) { Err(_) => Err(format!( "unable to open template file '{}'", @@ -309,7 +355,7 @@ mod tests { #[test] fn get_source() { - let path = Config::new("") + let path = Config::from_toml("") .and_then(|config| config.find_template("b.html", None)) .unwrap(); assert_eq!(get_template_source(&path).unwrap(), "bar"); @@ -319,7 +365,7 @@ mod tests { fn test_default_config() { let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); root.push("templates"); - let config = Config::new("").unwrap(); + let config = Config::from_toml("").unwrap(); assert_eq!(config.dirs, vec![root]); } @@ -328,7 +374,7 @@ mod tests { fn test_config_dirs() { let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); root.push("tpl"); - let config = Config::new("[general]\ndirs = [\"tpl\"]").unwrap(); + let config = Config::from_toml("[general]\ndirs = [\"tpl\"]").unwrap(); assert_eq!(config.dirs, vec![root]); } @@ -342,7 +388,7 @@ mod tests { #[test] fn find_absolute() { - let config = Config::new("").unwrap(); + let config = Config::from_toml("").unwrap(); let root = config.find_template("a.html", None).unwrap(); let path = config.find_template("sub/b.html", Some(&root)).unwrap(); assert_eq_rooted(&path, "sub/b.html"); @@ -351,14 +397,14 @@ mod tests { #[test] #[should_panic] fn find_relative_nonexistent() { - let config = Config::new("").unwrap(); + let config = Config::from_toml("").unwrap(); let root = config.find_template("a.html", None).unwrap(); config.find_template("c.html", Some(&root)).unwrap(); } #[test] fn find_relative() { - let config = Config::new("").unwrap(); + let config = Config::from_toml("").unwrap(); let root = config.find_template("sub/b.html", None).unwrap(); let path = config.find_template("c.html", Some(&root)).unwrap(); 
assert_eq_rooted(&path, "sub/c.html"); @@ -366,7 +412,7 @@ mod tests { #[test] fn find_relative_sub() { - let config = Config::new("").unwrap(); + let config = Config::from_toml("").unwrap(); let root = config.find_template("sub/b.html", None).unwrap(); let path = config.find_template("sub1/d.html", Some(&root)).unwrap(); assert_eq_rooted(&path, "sub/sub1/d.html"); @@ -389,7 +435,7 @@ mod tests { "#; let default_syntax = Syntax::default(); - let config = Config::new(raw_config).unwrap(); + let config = Config::from_toml(raw_config).unwrap(); assert_eq!(config.default_syntax, "foo"); let foo = config.syntaxes.get("foo").unwrap(); @@ -421,7 +467,7 @@ mod tests { "#; let default_syntax = Syntax::default(); - let config = Config::new(raw_config).unwrap(); + let config = Config::from_toml(raw_config).unwrap(); assert_eq!(config.default_syntax, "foo"); let foo = config.syntaxes.get("foo").unwrap(); @@ -449,7 +495,7 @@ mod tests { syntax = [{ name = "default" }] "#; - let _config = Config::new(raw_config).unwrap(); + let _config = Config::from_toml(raw_config).unwrap(); } #[cfg(feature = "toml")] @@ -461,7 +507,7 @@ mod tests { { name = "foo", block_start = "%%" } ] "#; - let _config = Config::new(raw_config).unwrap(); + let _config = Config::from_toml(raw_config).unwrap(); } #[cfg(feature = "toml")] @@ -473,13 +519,13 @@ mod tests { default_syntax = "foo" "#; - let _config = Config::new(raw_config).unwrap(); + let _config = Config::from_toml(raw_config).unwrap(); } #[cfg(feature = "config")] #[test] fn escape_modes() { - let config = Config::new( + let config = Config::from_toml( r#" [[escaper]] path = "::askama::Js" @@ -504,7 +550,7 @@ mod tests { #[cfg(feature = "config")] #[test] fn test_whitespace_parsing() { - let config = Config::new( + let config = Config::from_toml( r#" [general] whitespace = "suppress" @@ -513,10 +559,10 @@ mod tests { .unwrap(); assert_eq!(config.whitespace, WhitespaceHandling::Suppress); - let config = Config::new(r#""#).unwrap(); + let config 
= Config::from_toml(r#""#).unwrap(); assert_eq!(config.whitespace, WhitespaceHandling::Preserve); - let config = Config::new( + let config = Config::from_toml( r#" [general] whitespace = "preserve" @@ -525,7 +571,7 @@ mod tests { .unwrap(); assert_eq!(config.whitespace, WhitespaceHandling::Preserve); - let config = Config::new( + let config = Config::from_toml( r#" [general] whitespace = "minimize" diff --git a/askama_parser/src/generator.rs b/askama_parser/src/generator.rs new file mode 100644 index 000000000..312f28605 --- /dev/null +++ b/askama_parser/src/generator.rs @@ -0,0 +1,163 @@ +use crate::config::get_template_source; +use crate::input::{Print, Source, TemplateInput}; +use crate::parser::{parse, Node}; +use crate::CompileError; + +use std::collections::hash_map::HashMap; +use std::path::PathBuf; + +#[derive(Default)] +pub struct TemplateArgs { + pub source: Option, + pub print: Print, + pub escaping: Option, + pub ext: Option, + pub syntax: Option, + pub config_path: Option, +} + +impl TemplateArgs { + pub fn new(ast: &'_ syn::DeriveInput) -> Result { + // Check that an attribute called `template()` exists once and that it is + // the proper type (list). + let mut template_args = None; + for attr in &ast.attrs { + let ident = match attr.path.get_ident() { + Some(ident) => ident, + None => continue, + }; + + if ident == "template" { + if template_args.is_some() { + return Err("duplicated 'template' attribute".into()); + } + + match attr.parse_meta() { + Ok(syn::Meta::List(syn::MetaList { nested, .. })) => { + template_args = Some(nested); + } + Ok(_) => return Err("'template' attribute must be a list".into()), + Err(e) => return Err(format!("unable to parse attribute: {}", e).into()), + } + } + } + let template_args = + template_args.ok_or_else(|| CompileError::from("no attribute 'template' found"))?; + + let mut args = Self::default(); + // Loop over the meta attributes and find everything that we + // understand. 
Return a CompileError if something is not right. + // `source` contains an enum that can represent `path` or `source`. + for item in template_args { + let pair = match item { + syn::NestedMeta::Meta(syn::Meta::NameValue(ref pair)) => pair, + _ => { + use quote::ToTokens; + return Err(format!( + "unsupported attribute argument {:?}", + item.to_token_stream() + ) + .into()); + } + }; + let ident = match pair.path.get_ident() { + Some(ident) => ident, + None => unreachable!("not possible in syn::Meta::NameValue(…)"), + }; + + if ident == "path" { + if let syn::Lit::Str(ref s) = pair.lit { + if args.source.is_some() { + return Err("must specify 'source' or 'path', not both".into()); + } + args.source = Some(Source::Path(s.value())); + } else { + return Err("template path must be string literal".into()); + } + } else if ident == "source" { + if let syn::Lit::Str(ref s) = pair.lit { + if args.source.is_some() { + return Err("must specify 'source' or 'path', not both".into()); + } + args.source = Some(Source::Source(s.value())); + } else { + return Err("template source must be string literal".into()); + } + } else if ident == "print" { + if let syn::Lit::Str(ref s) = pair.lit { + args.print = s.value().parse()?; + } else { + return Err("print value must be string literal".into()); + } + } else if ident == "escape" { + if let syn::Lit::Str(ref s) = pair.lit { + args.escaping = Some(s.value()); + } else { + return Err("escape value must be string literal".into()); + } + } else if ident == "ext" { + if let syn::Lit::Str(ref s) = pair.lit { + args.ext = Some(s.value()); + } else { + return Err("ext value must be string literal".into()); + } + } else if ident == "syntax" { + if let syn::Lit::Str(ref s) = pair.lit { + args.syntax = Some(s.value()) + } else { + return Err("syntax value must be string literal".into()); + } + } else if ident == "config" { + if let syn::Lit::Str(ref s) = pair.lit { + args.config_path = Some(s.value()) + } else { + return Err("config value must be 
string literal".into()); + } + } else { + return Err(format!("unsupported attribute key {:?} found", ident).into()); + } + } + + Ok(args) + } +} + +pub fn find_used_templates( + input: &TemplateInput<'_>, + map: &mut HashMap, + source: String, +) -> Result<(), CompileError> { + let mut dependency_graph = Vec::new(); + let mut check = vec![(input.path.clone(), source)]; + while let Some((path, source)) = check.pop() { + for n in parse(&source, input.syntax)? { + match n { + Node::Extends(extends) => { + let extends = input.config.find_template(extends, Some(&path))?; + let dependency_path = (path.clone(), extends.clone()); + if dependency_graph.contains(&dependency_path) { + return Err(format!( + "cyclic dependecy in graph {:#?}", + dependency_graph + .iter() + .map(|e| format!("{:#?} --> {:#?}", e.0, e.1)) + .collect::>() + ) + .into()); + } + dependency_graph.push(dependency_path); + let source = get_template_source(&extends)?; + check.push((extends, source)); + } + Node::Import(_, import, _) => { + let import = input.config.find_template(import, Some(&path))?; + let source = get_template_source(&import)?; + check.push((import, source)); + } + _ => {} + } + } + map.insert(path, source); + } + Ok(()) +} diff --git a/askama_derive/src/input.rs b/askama_parser/src/input.rs similarity index 78% rename from askama_derive/src/input.rs rename to askama_parser/src/input.rs index 68d01db1d..ae663c0c4 100644 --- a/askama_derive/src/input.rs +++ b/askama_parser/src/input.rs @@ -1,3 +1,5 @@ +//! Input for the `template()` attribute. 
+ use crate::config::{Config, Syntax}; use crate::generator::TemplateArgs; use crate::CompileError; @@ -7,25 +9,35 @@ use std::str::FromStr; use mime::Mime; -pub(crate) struct TemplateInput<'a> { - pub(crate) ast: &'a syn::DeriveInput, - pub(crate) config: &'a Config<'a>, - pub(crate) syntax: &'a Syntax<'a>, - pub(crate) source: Source, - pub(crate) print: Print, - pub(crate) escaper: &'a str, - pub(crate) ext: Option, - pub(crate) mime_type: String, - pub(crate) path: PathBuf, +/// Input configuration passed to the Askama `template()` attribute. +pub struct TemplateInput<'a> { + /// The original raw `syn::DeriveInput` parsed. + pub ast: &'a syn::DeriveInput, + /// The configuration in use. + pub config: &'a Config, + /// The syntax used for this template. + pub syntax: &'a Syntax, + /// The source of the template. + pub source: Source, + /// Debug printing mode. + pub print: Print, + /// The escaper to use for the template. + pub escaper: &'a str, + /// The file extension specified for inline templates. + pub ext: Option, + /// The MIME type of the template's results. + pub mime_type: String, + /// The generated path of the template. + pub path: PathBuf, } impl TemplateInput<'_> { /// Extract the template metadata from the `DeriveInput` structure. This /// mostly recovers the data for the `TemplateInput` fields from the /// `template()` attribute list fields. 
- pub(crate) fn new<'n>( + pub fn new<'n>( ast: &'n syn::DeriveInput, - config: &'n Config<'_>, + config: &'n Config, args: TemplateArgs, ) -> Result, CompileError> { let TemplateArgs { @@ -51,7 +63,7 @@ impl TemplateInput<'_> { // Validate syntax let syntax = syntax.map_or_else( - || Ok(config.syntaxes.get(config.default_syntax).unwrap()), + || Ok(config.syntaxes.get(&config.default_syntax).unwrap()), |s| { config .syntaxes @@ -99,7 +111,7 @@ impl TemplateInput<'_> { } #[inline] - pub(crate) fn extension(&self) -> Option<&str> { + pub fn extension(&self) -> Option<&str> { ext_default_to_path(self.ext.as_deref(), &self.path) } } @@ -123,16 +135,32 @@ fn extension(path: &Path) -> Option<&str> { } } -pub(crate) enum Source { +/// Ways to specify the source for an Askama template. +pub enum Source { + /// Load the specified template file. + /// + /// The path is interpreted as relative to the configured template directories + /// (by default, this is a templates directory next to your Cargo.toml). Path(String), + /// Directly set the template source. + /// + /// This can be useful for test cases or short templates. The generated path is + /// undefined, which generally makes it impossible to refer to this template + /// from other templates. Source(String), } -#[derive(PartialEq)] -pub(crate) enum Print { +/// Print debug information at compile time. +#[derive(PartialEq, Eq)] +#[non_exhaustive] +pub enum Print { + /// Print all debug info. All, + /// Print the parsed syntax tree. Ast, + /// Print the generated code. Code, + /// Do not print. 
None, } diff --git a/askama_parser/src/lib.rs b/askama_parser/src/lib.rs new file mode 100644 index 000000000..2893763c0 --- /dev/null +++ b/askama_parser/src/lib.rs @@ -0,0 +1,59 @@ +#![forbid(unsafe_code)] +#![deny(elided_lifetimes_in_paths)] +#![deny(unreachable_pub)] + +use std::borrow::Cow; +use std::fmt; + +use proc_macro2::{Span, TokenStream}; + +pub mod config; +pub mod generator; +pub mod input; +pub mod parser; + +/// An error that occurred during compilation, along with the source location. +#[derive(Debug, Clone)] +pub struct CompileError { + msg: Cow<'static, str>, + span: Span, +} + +impl CompileError { + /// Create a new error, reporting a failure which is described by the message + /// and occurred at the specified source location. + pub fn new>>(s: S, span: Span) -> Self { + Self { + msg: s.into(), + span, + } + } + + /// Convert the error into a Rust compiler error. + pub fn into_compile_error(self) -> TokenStream { + syn::Error::new(self.span, self.msg).to_compile_error() + } +} + +impl std::error::Error for CompileError {} + +impl fmt::Display for CompileError { + #[inline] + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.write_str(&self.msg) + } +} + +impl From<&'static str> for CompileError { + #[inline] + fn from(s: &'static str) -> Self { + Self::new(s, Span::call_site()) + } +} + +impl From for CompileError { + #[inline] + fn from(s: String) -> Self { + Self::new(s, Span::call_site()) + } +} diff --git a/askama_derive/src/parser.rs b/askama_parser/src/parser.rs similarity index 87% rename from askama_derive/src/parser.rs rename to askama_parser/src/parser.rs index dc163fae7..63eaa19f6 100644 --- a/askama_derive/src/parser.rs +++ b/askama_parser/src/parser.rs @@ -1,3 +1,5 @@ +//! Parser and AST nodes for Askama's template syntax. 
+ use std::cell::Cell; use std::str; @@ -13,65 +15,196 @@ use nom::{self, error_position, AsChar, IResult, InputTakeAtPosition}; use crate::config::Syntax; use crate::CompileError; +/// An abstract syntax tree node. #[derive(Debug, PartialEq)] -pub(crate) enum Node<'a> { +pub enum Node<'a> { + /// Literal text to output directly. + /// + /// The first and third tuple elements are the left- and right-side + /// white space surrounding the output, and the second is the value + /// itself. Lit(&'a str, &'a str, &'a str), - Comment(Ws), + /// A block comment. + /// + /// ```ignore + /// {# A Comment #} + /// ``` + Comment(Ws, &'a str), + /// An expression, the result of which will be output. + /// + /// ```ignore + /// {{ 25 / 6 - 4 }} + /// ``` Expr(Ws, Expr<'a>), + /// A macro invocation. + /// + /// ```ignore + /// {% call scope::heading(s) %} + /// ``` + /// + /// The second tuple element is the optional scope, the third is the + /// name of the macro, and the last element is the macro arguments. Call(Ws, Option<&'a str>, &'a str, Vec>), + /// A variable declaration without an assignment. + /// + /// ```ignore + /// {% let val %} + /// ``` LetDecl(Ws, Target<'a>), + /// A variable assignment. + /// + /// ```ignore + /// {% let val = "foo" %} + /// ``` Let(Ws, Target<'a>, Expr<'a>), + /// An if-else block. + /// + /// ```ignore + /// {% if users.len() == 0 %} + /// No users + /// {% else if users.len() == 1 %} + /// 1 user + /// {% else %} + /// {{ users.len() }} users + /// {% endif %} + /// ``` Cond(Vec>, Ws), - Match(Ws, Expr<'a>, Vec>, Ws), + /// A match block with several clauses. + /// + /// ```ignore + /// {% match item %} + /// {% when Some with ("foo") %} + /// Found literal foo + /// {% when Some with (val) %} + /// Found {{ val }} + /// {% when None %} + /// {% endmatch %} + /// ``` + Match(Ws, Expr<'a>, Vec>, Vec>, Ws), + /// A for loop. 
+ /// + /// ```ignore + /// Users + /// ----- + /// {% for user in users %} + /// - {{ user.name }} + /// {% endfor %} + /// ``` Loop(Loop<'a>), - Extends(Expr<'a>), + /// A template inheritance declaration. + /// + /// ```ignore + /// {% extends "base.html" %} + /// ``` + Extends(&'a str), + /// A block definition. + /// + /// ```ignore + /// {% block title %}Index{% endblock %} + /// ``` BlockDef(Ws, &'a str, Vec>, Ws), + /// Include the specified template file inline here. + /// + /// ```ignore + /// {% include "item.html" %} + /// ``` Include(Ws, &'a str), + /// Import macros from another template file. + /// + /// ```ignore + /// {% import "macros.html" as scope %} + /// ``` Import(Ws, &'a str, &'a str), + /// A macro declaration. + /// + /// ```ignore + /// {% macro heading(arg) %} + /// {{arg}} + /// ------- + /// {% endmacro %} + /// ``` Macro(&'a str, Macro<'a>), + /// A raw block. + /// + /// ```ignore + /// {% raw %} + /// {{ this * is - not + an % expression }} + /// {% endraw %} + /// ``` + /// + /// The second and fourth tuple elements are the left- and right-side + /// white space surrounding the output, and the third is the value + /// itself. Raw(Ws, &'a str, &'a str, &'a str, Ws), + /// The break statement. Break(Ws), + /// The continue statement. Continue(Ws), } +/// A for loop syntax node. #[derive(Debug, PartialEq)] -pub(crate) struct Loop<'a> { - pub(crate) ws1: Ws, - pub(crate) var: Target<'a>, - pub(crate) iter: Expr<'a>, - pub(crate) cond: Option>, - pub(crate) body: Vec>, - pub(crate) ws2: Ws, - pub(crate) else_block: Vec>, - pub(crate) ws3: Ws, -} - +pub struct Loop<'a> { + /// The whitespace suppression for the start tag. + pub ws1: Ws, + /// The variable of iteration within the loop. + pub var: Target<'a>, + /// The collection to iterate over. + pub iter: Expr<'a>, + /// An optional condition, which if it evaluates to false should skip that iteration. + pub cond: Option>, + /// The body of the loop. 
+ pub body: Vec>, + /// The whitespace suppression for the else tag. + pub ws2: Ws, + /// The else block of the loop, invoked if the collection is empty. + pub else_block: Vec>, + /// The whitespace suppression for the end tag. + pub ws3: Ws, +} + +/// An expression syntax node. #[derive(Debug, PartialEq)] -pub(crate) enum Expr<'a> { +pub enum Expr<'a> { + /// A boolean literal. BoolLit(&'a str), + /// A numeric literal. NumLit(&'a str), + /// A string literal. StrLit(&'a str), + /// A character literal. CharLit(&'a str), + /// A variable reference. Var(&'a str), + /// A path reference. Path(Vec<&'a str>), + /// An array of expressions. Array(Vec>), + /// An attribute reference of an expression. Attr(Box>, &'a str), + /// An index into an expression. Index(Box>, Box>), + /// An application of a filter to an expression. Filter(&'a str, Vec>), + /// A unary operation. Unary(&'a str, Box>), + /// A binary operation. BinOp(&'a str, Box>, Box>), Range(&'a str, Option>>, Option>>), Group(Box>), + /// A tuple expression. Tuple(Vec>), + /// A function call expression. Call(Box>, Vec>), RustMacro(&'a str, &'a str), + /// The Askama equivalent of Rust's try operator `?`. Try(Box>), } impl Expr<'_> { /// Returns `true` if enough assumptions can be made, /// to determine that `self` is copyable. - pub(crate) fn is_copyable(&self) -> bool { + pub fn is_copyable(&self) -> bool { self.is_copyable_within_op(false) } @@ -99,7 +232,7 @@ impl Expr<'_> { } /// Returns `true` if this is an `Attr` where the `obj` is `"self"`. - pub(crate) fn is_attr_self(&self) -> bool { + pub fn is_attr_self(&self) -> bool { match self { Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Var("self")) => true, Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Attr(..)) => obj.is_attr_self(), @@ -110,7 +243,7 @@ impl Expr<'_> { /// Returns `true` if the outcome of this expression may be used multiple times in the same /// `write!()` call, without evaluating the expression again, i.e. 
the expression should be /// side-effect free. - pub(crate) fn is_cachable(&self) -> bool { + pub fn is_cachable(&self) -> bool { match self { // Literals are the definition of pure: Expr::BoolLit(_) => true, @@ -141,30 +274,46 @@ impl Expr<'_> { } } -pub(crate) type When<'a> = (Ws, Target<'a>, Vec>); +/// A single branch of a match block. +pub type When<'a> = (Ws, Target<'a>, Vec>); +/// A macro definition. #[derive(Debug, PartialEq)] -pub(crate) struct Macro<'a> { - pub(crate) ws1: Ws, - pub(crate) args: Vec<&'a str>, - pub(crate) nodes: Vec>, - pub(crate) ws2: Ws, -} - +pub struct Macro<'a> { + /// Whitespace suppression for the begin macro tag. + pub ws1: Ws, + /// The names of the macro's arguments. + pub args: Vec<&'a str>, + /// The body of the macro. + pub nodes: Vec>, + /// Whitespace suppression for the end macro tag. + pub ws2: Ws, +} + +/// The Askama equivalent of a Rust pattern, the target of a match or assignment. #[derive(Debug, PartialEq)] -pub(crate) enum Target<'a> { +pub enum Target<'a> { + /// Bind the value to a name. Name(&'a str), + /// Destructure a tuple value. Tuple(Vec<&'a str>, Vec>), + /// Destructure a struct value. Struct(Vec<&'a str>, Vec<(&'a str, Target<'a>)>), + /// Match a numeric literal. NumLit(&'a str), + /// Match a string literal. StrLit(&'a str), + /// Match a character literal. CharLit(&'a str), + /// Match a boolean literal. BoolLit(&'a str), + /// Match against a path. Path(Vec<&'a str>), } -#[derive(Clone, Copy, Debug, PartialEq)] -pub(crate) enum Whitespace { +/// Whitespace preservation or suppression. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum Whitespace { Preserve, Suppress, Minimize, @@ -181,18 +330,23 @@ impl From for Whitespace { } } -/// First field is "minus/plus sign was used on the left part of the item". +/// Whitespace suppression for a block. /// -/// Second field is "minus/plus sign was used on the right part of the item". 
-#[derive(Clone, Copy, Debug, PartialEq)] -pub(crate) struct Ws(pub(crate) Option, pub(crate) Option); +/// The first tuple value is the setting (`-`/`+`) for the left side of block. +/// The second tuple value is the setting (`-`/`+`) for the right side of the block. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Ws(pub Option, pub Option); -pub(crate) type Cond<'a> = (Ws, Option>, Vec>); +/// A single condition with its consequent. +pub type Cond<'a> = (Ws, Option>, Vec>); +/// An if or if let condition. #[derive(Debug, PartialEq)] -pub(crate) struct CondTest<'a> { - pub(crate) target: Option>, - pub(crate) expr: Expr<'a>, +pub struct CondTest<'a> { + /// For an if let, the assignment target. + pub target: Option>, + /// The condition expression to evaluate. + pub expr: Expr<'a>, } fn is_ws(c: char) -> bool { @@ -239,15 +393,15 @@ fn skip_till<'a, O>( } struct State<'a> { - syntax: &'a Syntax<'a>, + syntax: &'a Syntax, loop_depth: Cell, } fn take_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { let p_start = alt(( - tag(s.syntax.block_start), - tag(s.syntax.comment_start), - tag(s.syntax.expr_start), + tag(AsRef::::as_ref(&s.syntax.block_start)), + tag(AsRef::::as_ref(&s.syntax.comment_start)), + tag(AsRef::::as_ref(&s.syntax.expr_start)), )); let (i, _) = not(eof)(i)?; @@ -860,7 +1014,7 @@ fn block_match<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { opt(expr_handle_ws), |i| tag_block_end(i, s), cut(tuple(( - ws(many0(ws(value((), |i| block_comment(i, s))))), + |i| parse_ws_or_comment(i, s), many1(|i| when_block(i, s)), cut(tuple(( opt(|i| match_else_block(i, s)), @@ -874,14 +1028,14 @@ fn block_match<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { ))), ))), )); - let (i, (pws1, _, (expr, nws1, _, (_, arms, (else_arm, (_, pws2, _, nws2)))))) = p(i)?; + let (i, (pws1, _, (expr, nws1, _, (interstitial, arms, (else_arm, (_, pws2, _, nws2)))))) = p(i)?; let mut arms = arms; if let Some(arm) = else_arm { 
arms.push(arm); } - Ok((i, Node::Match(Ws(pws1, nws1), expr, arms, Ws(pws2, nws2)))) + Ok((i, Node::Match(Ws(pws1, nws1), expr, interstitial, arms, Ws(pws2, nws2)))) } fn block_let(i: &str) -> IResult<&str, Node<'_>> { @@ -974,7 +1128,7 @@ fn block_for<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { } fn block_extends(i: &str) -> IResult<&str, Node<'_>> { - let (i, (_, name)) = tuple((ws(tag("extends")), ws(expr_str_lit)))(i)?; + let (i, (_, name)) = tuple((ws(tag("extends")), ws(str_lit)))(i)?; Ok((i, Node::Extends(name))) } @@ -1146,9 +1300,10 @@ fn block_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { fn block_comment_body<'a>(mut i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { let mut level = 0; + let initial = i; loop { - let (end, tail) = take_until(s.syntax.comment_end)(i)?; - match take_until::<_, _, Error<_>>(s.syntax.comment_start)(i) { + let (end, _) = take_until(AsRef::::as_ref(&s.syntax.comment_end))(i)?; + match take_until::<_, _, Error<_>>(AsRef::::as_ref(&s.syntax.comment_start))(i) { Ok((start, _)) if start.as_ptr() < end.as_ptr() => { level += 1; i = &start[2..]; @@ -1157,7 +1312,10 @@ fn block_comment_body<'a>(mut i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a level -= 1; i = &end[2..]; } - _ => return Ok((end, tail)), + _ => { + let len = initial.len() - end.len(); + return Ok((end, &initial[..len])); + } } } } @@ -1181,7 +1339,11 @@ fn block_comment<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { } else { None }; - Ok((i, Node::Comment(Ws(pws, nws)))) + let text = match nws { + Some(_) => &tail[..tail.len()-1], + None => tail, + }; + Ok((i, Node::Comment(Ws(pws, nws), text))) } fn parse_template<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec>> { @@ -1193,29 +1355,37 @@ fn parse_template<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec>> { + many0(alt(( + complete(|i| take_content(i, s)), + complete(|i| 
block_comment(i, s)), + )))(i) +} + fn tag_block_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.block_start)(i) + tag(AsRef::::as_ref(&s.syntax.block_start))(i) } fn tag_block_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.block_end)(i) + tag(AsRef::::as_ref(&s.syntax.block_end))(i) } fn tag_comment_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.comment_start)(i) + tag(AsRef::::as_ref(&s.syntax.comment_start))(i) } fn tag_comment_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.comment_end)(i) + tag(AsRef::::as_ref(&s.syntax.comment_end))(i) } fn tag_expr_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.expr_start)(i) + tag(AsRef::::as_ref(&s.syntax.expr_start))(i) } fn tag_expr_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.expr_end)(i) + tag(AsRef::::as_ref(&s.syntax.expr_end))(i) } -pub(crate) fn parse<'a>( - src: &'a str, - syntax: &'a Syntax<'a>, -) -> Result>, CompileError> { +/// Parse template source to an abstract syntax tree. +/// +/// Tries to parse the provided template string using the given syntax. +/// If successful, returns the list of nodes parsed. 
+pub fn parse<'a>(src: &'a str, syntax: &'a Syntax) -> Result>, CompileError> { let state = State { syntax, loop_depth: Cell::new(0), @@ -1483,8 +1653,8 @@ mod tests { #[test] fn change_delimiters_parse_filter() { let syntax = Syntax { - expr_start: "{=", - expr_end: "=}", + expr_start: "{=".into(), + expr_end: "=}".into(), ..Syntax::default() }; @@ -1701,99 +1871,99 @@ mod tests { assert_eq!( super::parse("{##}", s).unwrap(), - vec![Node::Comment(Ws(None, None))], + vec![Node::Comment(Ws(None, None), "")], ); assert_eq!( super::parse("{#- #}", s).unwrap(), - vec![Node::Comment(Ws(Some(Whitespace::Suppress), None))], + vec![Node::Comment(Ws(Some(Whitespace::Suppress), None), " ")], ); assert_eq!( super::parse("{# -#}", s).unwrap(), - vec![Node::Comment(Ws(None, Some(Whitespace::Suppress)))], + vec![Node::Comment(Ws(None, Some(Whitespace::Suppress)), " ")], ); assert_eq!( super::parse("{#--#}", s).unwrap(), vec![Node::Comment(Ws( Some(Whitespace::Suppress), Some(Whitespace::Suppress) - ))], + ), "")], ); assert_eq!( super::parse("{#- foo\n bar -#}", s).unwrap(), vec![Node::Comment(Ws( Some(Whitespace::Suppress), Some(Whitespace::Suppress) - ))], + ), " foo\n bar ")], ); assert_eq!( super::parse("{#- foo\n {#- bar\n -#} baz -#}", s).unwrap(), vec![Node::Comment(Ws( Some(Whitespace::Suppress), Some(Whitespace::Suppress) - ))], + ), " foo\n {#- bar\n -#} baz ")], ); assert_eq!( super::parse("{#+ #}", s).unwrap(), - vec![Node::Comment(Ws(Some(Whitespace::Preserve), None))], + vec![Node::Comment(Ws(Some(Whitespace::Preserve), None), " ")], ); assert_eq!( super::parse("{# +#}", s).unwrap(), - vec![Node::Comment(Ws(None, Some(Whitespace::Preserve)))], + vec![Node::Comment(Ws(None, Some(Whitespace::Preserve)), " ")], ); assert_eq!( super::parse("{#++#}", s).unwrap(), vec![Node::Comment(Ws( Some(Whitespace::Preserve), Some(Whitespace::Preserve) - ))], + ), "")], ); assert_eq!( super::parse("{#+ foo\n bar +#}", s).unwrap(), vec![Node::Comment(Ws( Some(Whitespace::Preserve), 
Some(Whitespace::Preserve) - ))], + ), " foo\n bar ")], ); assert_eq!( super::parse("{#+ foo\n {#+ bar\n +#} baz -+#}", s).unwrap(), vec![Node::Comment(Ws( Some(Whitespace::Preserve), Some(Whitespace::Preserve) - ))], + ), " foo\n {#+ bar\n +#} baz -")], ); assert_eq!( super::parse("{#~ #}", s).unwrap(), - vec![Node::Comment(Ws(Some(Whitespace::Minimize), None))], + vec![Node::Comment(Ws(Some(Whitespace::Minimize), None), " ")], ); assert_eq!( super::parse("{# ~#}", s).unwrap(), - vec![Node::Comment(Ws(None, Some(Whitespace::Minimize)))], + vec![Node::Comment(Ws(None, Some(Whitespace::Minimize)), " ")], ); assert_eq!( super::parse("{#~~#}", s).unwrap(), vec![Node::Comment(Ws( Some(Whitespace::Minimize), Some(Whitespace::Minimize) - ))], + ), "")], ); assert_eq!( super::parse("{#~ foo\n bar ~#}", s).unwrap(), vec![Node::Comment(Ws( Some(Whitespace::Minimize), Some(Whitespace::Minimize) - ))], + ), " foo\n bar ")], ); assert_eq!( super::parse("{#~ foo\n {#~ bar\n ~#} baz -~#}", s).unwrap(), vec![Node::Comment(Ws( Some(Whitespace::Minimize), Some(Whitespace::Minimize) - ))], + ), " foo\n {#~ bar\n ~#} baz -")], ); assert_eq!( super::parse("{# foo {# bar #} {# {# baz #} qux #} #}", s).unwrap(), - vec![Node::Comment(Ws(None, None))], + vec![Node::Comment(Ws(None, None), " foo {# bar #} {# {# baz #} qux #} ")], ); } diff --git a/askama_parser/templates/a.html b/askama_parser/templates/a.html new file mode 100644 index 000000000..257cc5642 --- /dev/null +++ b/askama_parser/templates/a.html @@ -0,0 +1 @@ +foo diff --git a/askama_parser/templates/b.html b/askama_parser/templates/b.html new file mode 100644 index 000000000..5716ca598 --- /dev/null +++ b/askama_parser/templates/b.html @@ -0,0 +1 @@ +bar diff --git a/askama_parser/templates/sub/b.html b/askama_parser/templates/sub/b.html new file mode 100644 index 000000000..5716ca598 --- /dev/null +++ b/askama_parser/templates/sub/b.html @@ -0,0 +1 @@ +bar diff --git a/askama_parser/templates/sub/c.html 
b/askama_parser/templates/sub/c.html new file mode 100644 index 000000000..76018072e --- /dev/null +++ b/askama_parser/templates/sub/c.html @@ -0,0 +1 @@ +baz diff --git a/askama_parser/templates/sub/sub1/d.html b/askama_parser/templates/sub/sub1/d.html new file mode 100644 index 000000000..fa11a6a9c --- /dev/null +++ b/askama_parser/templates/sub/sub1/d.html @@ -0,0 +1 @@ +echo diff --git a/testing/tests/ui/match_with_extra.rs b/testing/tests/ui/match_with_extra.rs deleted file mode 100644 index 528441e38..000000000 --- a/testing/tests/ui/match_with_extra.rs +++ /dev/null @@ -1,20 +0,0 @@ -use askama::Template; - -#[derive(Template)] -#[template( - ext = "txt", - source = r#" -{%- match good -%} - // Help, I forgot how to write comments! - {%- when true %} - good - {%- when _ -%} - bad -{%- endmatch -%}"# -)] -struct MatchWithExtra { - good: bool, -} - -fn main() { -} diff --git a/testing/tests/ui/match_with_extra.stderr b/testing/tests/ui/match_with_extra.stderr deleted file mode 100644 index 8f515bd33..000000000 --- a/testing/tests/ui/match_with_extra.stderr +++ /dev/null @@ -1,8 +0,0 @@ -error: problems parsing template source at row 3, column 4 near: - "// Help, I forgot how to write comments!"... - --> tests/ui/match_with_extra.rs:3:10 - | -3 | #[derive(Template)] - | ^^^^^^^^ - | - = note: this error originates in the derive macro `Template` (in Nightly builds, run with -Z macro-backtrace for more info)