diff --git a/src/generator.rs b/src/generator.rs
index 87d1f00..0dbcaa3 100644
--- a/src/generator.rs
+++ b/src/generator.rs
@@ -22,7 +22,7 @@ use crate::docs::DocComment;
 pub(crate) fn generate(
     name: Ident,
     generics: &Generics,
-    path: Option<PathBuf>,
+    paths: Vec<PathBuf>,
     rules: Vec<OptimizedRule>,
     defaults: Vec<&str>,
     doc_comment: &DocComment,
@@ -32,10 +32,7 @@ pub(crate) fn generate(
 
     let builtins = generate_builtin_rules();
     let include_fix = if include_grammar {
-        match path {
-            Some(ref path) => generate_include(&name, path.to_str().expect("non-Unicode path")),
-            None => quote!(),
-        }
+        generate_include(&name, paths)
     } else {
         quote!()
     };
@@ -170,17 +167,33 @@ fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
     builtins
 }
 
-// Needed because Cargo doesn't watch for changes in grammars.
-fn generate_include(name: &Ident, path: &str) -> TokenStream {
+/// Generate a Rust `include_str!` for each grammar file, so that Cargo watches the grammars for changes.
+fn generate_include(name: &Ident, paths: Vec<PathBuf>) -> TokenStream {
     let const_name = format_ident!("_PEST_GRAMMAR_{}", name);
     // Need to make this relative to the current directory since the path to the file
     // is derived from the CARGO_MANIFEST_DIR environment variable
-    let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
-    current_dir.push(path);
-    let relative_path = current_dir.to_str().expect("path contains invalid unicode");
+    let current_dir = std::env::current_dir().expect("Unable to get current directory");
+
+    let include_tokens = paths.iter().map(|path| {
+        let path = path.to_str().expect("non-Unicode path");
+
+        let relative_path = current_dir
+            .join(path)
+            .to_str()
+            .expect("path contains invalid unicode")
+            .to_string();
+
+        quote! {
+            include_str!(#relative_path)
+        }
+    });
+
+    let len = include_tokens.len();
     quote! {
         #[allow(non_upper_case_globals)]
-        const #const_name: &'static str = include_str!(#relative_path);
+        const #const_name: [&'static str; #len] = [
+            #(#include_tokens),*
+        ];
     }
 }
 
@@ -1016,14 +1029,16 @@ mod tests {
         let defaults = vec!["ANY"];
         let result = result_type();
         let box_ty = box_type();
-        let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
-        current_dir.push("test.pest");
-        let test_path = current_dir.to_str().expect("path contains invalid unicode");
+        let current_dir = std::env::current_dir().expect("Unable to get current directory");
+
+        let base_path = current_dir.join("base.pest").to_str().unwrap().to_string();
+        let test_path = current_dir.join("test.pest").to_str().unwrap().to_string();
+
         assert_eq!(
-            generate(name, &generics, Some(PathBuf::from("test.pest")), rules, defaults, doc_comment, true).to_string(),
+            generate(name, &generics, vec![PathBuf::from("base.pest"), PathBuf::from("test.pest")], rules, defaults, doc_comment, true).to_string(),
             quote! {
                 #[allow(non_upper_case_globals)]
-                const _PEST_GRAMMAR_MyParser: &'static str = include_str!(#test_path);
+                const _PEST_GRAMMAR_MyParser: [&'static str; 2usize] = [include_str!(#base_path), include_str!(#test_path)];
 
                 #[doc = "This is Rule doc\nThis is second line"]
                 #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
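
For context, a minimal sketch of the multi-grammar usage this change enables on the derive side, which the lib.rs diff below wires up. The grammar file names are hypothetical; each `#[grammar = ...]` attribute contributes one path to `paths` (and one `include_str!` entry in the generated constant), and the grammar sources are concatenated in attribute order before parsing.

    // Editor's sketch, not part of the patch. File names are hypothetical.
    use pest_derive::Parser;

    #[derive(Parser)]
    #[grammar = "base.pest"]
    #[grammar = "json.pest"]
    struct JsonParser;
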
diff --git a/src/lib.rs b/src/lib.rs
index f808987..7aed193 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -27,7 +27,7 @@ use std::io::{self, Read};
 use std::path::Path;
 
 use proc_macro2::TokenStream;
-use syn::{Attribute, DeriveInput, Generics, Ident, Lit, Meta};
+use syn::{Attribute, DeriveInput, Expr, ExprLit, Generics, Ident, Lit, Meta};
 
 #[macro_use]
 mod macros;
@@ -45,7 +45,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
     let (name, generics, contents) = parse_derive(ast);
 
     let mut data = String::new();
-    let mut path = None;
+    let mut paths = vec![];
 
     for content in contents {
         let (_data, _path) = match content {
@@ -81,8 +81,9 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
         };
 
         data.push_str(&_data);
-        if _path.is_some() {
-            path = _path;
+        match _path {
+            Some(path) => paths.push(path),
+            None => (),
         }
     }
 
@@ -99,7 +100,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
     generator::generate(
         name,
         &generics,
-        path,
+        paths,
         optimized,
         defaults,
         &doc_comment,
@@ -127,11 +128,9 @@ fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
     let grammar: Vec<&Attribute> = ast
         .attrs
         .iter()
-        .filter(|attr| match attr.parse_meta() {
-            Ok(Meta::NameValue(name_value)) => {
-                name_value.path.is_ident("grammar") || name_value.path.is_ident("grammar_inline")
-            }
-            _ => false,
+        .filter(|attr| {
+            let path = attr.meta.path();
+            path.is_ident("grammar") || path.is_ident("grammar_inline")
         })
         .collect();
 
@@ -148,9 +147,12 @@ fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
 }
 
 fn get_attribute(attr: &Attribute) -> GrammarSource {
-    match attr.parse_meta() {
-        Ok(Meta::NameValue(name_value)) => match name_value.lit {
-            Lit::Str(string) => {
+    match &attr.meta {
+        Meta::NameValue(name_value) => match &name_value.value {
+            Expr::Lit(ExprLit {
+                lit: Lit::Str(string),
+                ..
+            }) => {
                 if name_value.path.is_ident("grammar") {
                     GrammarSource::File(string.value())
                 } else {
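
The lib.rs hunks above also migrate attribute handling from syn 1's `Attribute::parse_meta()` to syn 2, where `attr.meta` is a plain field and the right-hand side of a name-value attribute is an `Expr` rather than a `Lit`, so a string literal is reached through `Expr::Lit`. A minimal, standalone sketch of that pattern (the helper name is hypothetical and not part of the patch):

    // Editor's sketch of the syn 2 name-value pattern used in get_attribute;
    // `grammar_file` is a hypothetical helper, not part of the patch.
    use syn::{Attribute, Expr, ExprLit, Lit, Meta};

    fn grammar_file(attr: &Attribute) -> Option<String> {
        if let Meta::NameValue(nv) = &attr.meta {
            if nv.path.is_ident("grammar") {
                // The attribute value is an expression; a string literal is
                // matched as Expr::Lit wrapping Lit::Str.
                if let Expr::Lit(ExprLit { lit: Lit::Str(s), .. }) = &nv.value {
                    return Some(s.value());
                }
            }
        }
        None
    }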