Skip to content

Commit 8fba851

Browse files
committed
Rewrite the line_docs generator to use a HashMap keyed by rule_name.
1 parent f14a957 commit 8fba851

File tree

3 files changed

+104
-82
lines changed

3 files changed

+104
-82
lines changed

generator/src/generator.rs

Lines changed: 46 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
// option. All files in the project carrying such notice may not be copied,
88
// modified, or distributed except according to those terms.
99

10+
use std::collections::HashMap;
1011
use std::path::PathBuf;
1112

1213
use proc_macro2::TokenStream;
@@ -18,17 +19,31 @@ use pest_meta::ast::*;
1819
use pest_meta::optimizer::*;
1920

2021
#[derive(Debug)]
21-
pub(crate) struct DocComment<'a> {
22-
pub(crate) grammar_docs: Vec<&'a str>,
23-
pub(crate) line_docs: Vec<Vec<&'a str>>,
24-
pub(crate) rules: Vec<Rule>,
22+
pub(crate) struct DocComment {
23+
/// Multi-line grammar doc (lines joined with `\n`)
24+
///
25+
/// e.g.
26+
///
27+
/// ```ignore
28+
/// "grammar doc 1\ngrammar doc 2"
29+
/// ```
30+
grammar_doc: String,
31+
/// Maps each rule name to its doc comment (multi-line comments joined with `\n`)
32+
///
33+
/// e.g.
34+
///
35+
/// ```ignore
36+
/// { "foo": "line doc 1\nline doc 2", "bar": "line doc 3" }
37+
/// ```
38+
line_docs: HashMap<String, String>,
2539
}
2640

27-
impl DocComment<'_> {
28-
fn line_docs_for_rule(&self, rule_name: &str) -> Option<String> {
29-
let idx = self.rules.iter().position(|r| r.name == rule_name)?;
30-
31-
self.line_docs.get(idx).map(|comments| comments.join("\n"))
41+
impl DocComment {
42+
pub fn new(grammar_doc: String, line_docs: HashMap<String, String>) -> Self {
43+
Self {
44+
grammar_doc,
45+
line_docs,
46+
}
3247
}
3348
}
3449

@@ -38,7 +53,7 @@ pub(crate) fn generate(
3853
path: Option<PathBuf>,
3954
rules: Vec<OptimizedRule>,
4055
defaults: Vec<&str>,
41-
doc_comment: &DocComment<'_>,
56+
doc_comment: &DocComment,
4257
include_grammar: bool,
4358
) -> TokenStream {
4459
let uses_eoi = defaults.iter().any(|name| *name == "EOI");
@@ -197,32 +212,25 @@ fn generate_include(name: &Ident, path: &str) -> TokenStream {
197212
}
198213
}
199214

200-
fn generate_enum(
201-
rules: &[OptimizedRule],
202-
doc_comment: &DocComment<'_>,
203-
uses_eoi: bool,
204-
) -> TokenStream {
215+
fn generate_enum(rules: &[OptimizedRule], doc_comment: &DocComment, uses_eoi: bool) -> TokenStream {
205216
let rules = rules.iter().map(|rule| {
206217
let rule_name = format_ident!("r#{}", rule.name);
207218

208-
let comments = doc_comment.line_docs_for_rule(&rule.name);
209-
let comments = comments.unwrap_or_else(|| "".to_owned());
210-
if comments.is_empty() {
211-
quote! {
219+
match doc_comment.line_docs.get(&rule.name) {
220+
Some(doc) => quote! {
221+
#[doc = #doc]
212222
#rule_name
213-
}
214-
} else {
215-
quote! {
216-
#[doc = #comments]
223+
},
224+
None => quote! {
217225
#rule_name
218-
}
226+
},
219227
}
220228
});
221229

222-
let grammar_docs = doc_comment.grammar_docs.join("\n");
230+
let grammar_doc = &doc_comment.grammar_doc;
223231
if uses_eoi {
224232
quote! {
225-
#[doc = #grammar_docs]
233+
#[doc = #grammar_doc]
226234
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
227235
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
228236
pub enum Rule {
@@ -232,7 +240,7 @@ fn generate_enum(
232240
}
233241
} else {
234242
quote! {
235-
#[doc = #grammar_docs]
243+
#[doc = #grammar_doc]
236244
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
237245
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
238246
pub enum Rule {
@@ -709,14 +717,12 @@ mod tests {
709717
expr: OptimizedExpr::Ident("g".to_owned()),
710718
}];
711719

720+
let mut line_docs = HashMap::new();
721+
line_docs.insert("f".to_owned(), "This is rule comment".to_owned());
722+
712723
let doc_comment = &DocComment {
713-
grammar_docs: vec!["Rule doc", "hello"],
714-
line_docs: vec![vec!["This is rule comment"]],
715-
rules: vec![Rule {
716-
name: "f".to_owned(),
717-
ty: RuleType::Normal,
718-
expr: Expr::Ident("g".to_owned()),
719-
}],
724+
grammar_doc: "Rule doc\nhello".to_owned(),
725+
line_docs,
720726
};
721727

722728
assert_eq!(
@@ -1009,7 +1015,7 @@ mod tests {
10091015
}
10101016

10111017
#[test]
1012-
fn generate_complete() {
1018+
fn test_generate_complete() {
10131019
let name = Ident::new("MyParser", Span::call_site());
10141020
let generics = Generics::default();
10151021

@@ -1026,21 +1032,12 @@ mod tests {
10261032
},
10271033
];
10281034

1035+
let mut line_docs = HashMap::new();
1036+
line_docs.insert("if".to_owned(), "If statement".to_owned());
1037+
10291038
let doc_comment = &DocComment {
1030-
line_docs: vec![vec![], vec!["If statement"]],
1031-
grammar_docs: vec!["This is Rule doc", "This is second line"],
1032-
rules: vec![
1033-
Rule {
1034-
name: "a".to_owned(),
1035-
ty: RuleType::Silent,
1036-
expr: Expr::Str("b".to_owned()),
1037-
},
1038-
Rule {
1039-
name: "if".to_owned(),
1040-
ty: RuleType::Silent,
1041-
expr: Expr::Str("b".to_owned()),
1042-
},
1043-
],
1039+
line_docs,
1040+
grammar_doc: "This is Rule doc\nThis is second line".to_owned(),
10441041
};
10451042

10461043
let defaults = vec!["ANY"];

generator/src/lib.rs

Lines changed: 57 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@
2121
#[macro_use]
2222
extern crate quote;
2323

24+
use std::collections::HashMap;
2425
use std::env;
2526
use std::fs::File;
2627
use std::io::{self, Read};
@@ -93,31 +94,28 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
9394
Err(error) => panic!("error parsing \n{}", error.renamed_rules(rename_meta_rule)),
9495
};
9596

96-
let grammar_docs = consume_grammar_doc(pairs.clone());
97+
let grammar_doc = consume_grammar_doc(pairs.clone());
9798
let line_docs = consume_line_docs(pairs.clone());
9899

99100
let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
100101
let ast = unwrap_or_report(parser::consume_rules(pairs));
101-
let optimized = optimizer::optimize(ast.clone());
102+
let optimized = optimizer::optimize(ast);
102103

103-
let doc_comment = DocComment {
104-
grammar_docs,
105-
line_docs,
106-
rules: ast,
107-
};
104+
let doc_comment = &DocComment::new(grammar_doc, line_docs);
108105

109106
generator::generate(
110107
name,
111108
&generics,
112109
path,
113110
optimized,
114111
defaults,
115-
&doc_comment,
112+
doc_comment,
116113
include_grammar,
117114
)
118115
}
119116

120-
fn consume_grammar_doc(pairs: Pairs<'_, Rule>) -> Vec<&'_ str> {
117+
/// Consumes the grammar doc into a String, with multiple lines joined by `\n`
118+
fn consume_grammar_doc(pairs: Pairs<'_, Rule>) -> String {
121119
let mut docs = vec![];
122120
for pair in pairs {
123121
if pair.as_rule() == Rule::grammar_doc {
@@ -126,31 +124,52 @@ fn consume_grammar_doc(pairs: Pairs<'_, Rule>) -> Vec<&'_ str> {
126124
}
127125
}
128126

129-
docs
127+
docs.join("\n")
130128
}
131129

132-
fn consume_line_docs(pairs: Pairs<'_, Rule>) -> Vec<Vec<&'_ str>> {
133-
let mut docs = vec![];
130+
/// Consume line docs into HashMap<rule_name, doc>
131+
///
132+
/// For example, given a `test.pest`:
133+
///
134+
/// ```ignore
135+
/// /// Line doc 1
136+
/// foo = {}
137+
///
138+
/// /// Line doc 2
139+
/// /// Line doc 3
140+
/// bar = {}
141+
/// ```
142+
///
143+
/// Returns `{ "foo": "Line doc 1", "bar": "Line doc 2\nLine doc 3" }`
144+
fn consume_line_docs(pairs: Pairs<'_, Rule>) -> HashMap<String, String> {
145+
let mut docs: HashMap<String, String> = HashMap::new();
134146
let mut comments = vec![];
135147

136148
for pair in pairs {
137-
if pair.as_rule() == Rule::grammar_rule {
149+
let rule = pair.as_rule();
150+
151+
if rule == Rule::grammar_rule {
138152
if let Some(inner) = pair.into_inner().next() {
139-
if inner.as_rule() == Rule::line_doc {
140-
let inner_doc = inner.into_inner().next().unwrap();
141-
comments.push(inner_doc.as_str());
142-
continue;
143-
} else {
144-
docs.push(comments);
145-
comments = vec![];
153+
// grammar_rule > line_doc | identifier
154+
match inner.as_rule() {
155+
Rule::line_doc => {
156+
// line_doc > inner_doc
157+
match inner.into_inner().next() {
158+
Some(inner_doc) => comments.push(inner_doc.as_str()),
159+
None => (),
160+
}
161+
}
162+
Rule::identifier => {
163+
if !comments.is_empty() {
164+
let rule_name = inner.as_str().to_owned();
165+
docs.insert(rule_name, comments.join("\n"));
166+
comments = vec![];
167+
}
168+
}
169+
_ => (),
146170
}
147171
}
148172
}
149-
150-
if !comments.is_empty() {
151-
docs.push(comments);
152-
comments = vec![];
153-
}
154173
}
155174

156175
docs
@@ -214,6 +233,8 @@ fn get_attribute(attr: &Attribute) -> GrammarSource {
214233

215234
#[cfg(test)]
216235
mod tests {
236+
use std::collections::HashMap;
237+
217238
use super::consume_line_docs;
218239
use super::parse_derive;
219240
use super::GrammarSource;
@@ -296,15 +317,18 @@ mod tests {
296317
};
297318

298319
let line_docs = consume_line_docs(pairs);
299-
assert_eq!(
300-
vec![
301-
vec!["Matches foo str, e.g.: `foo`"],
302-
vec!["Matches bar str,", " Indent 2, e.g: `bar` or `foobar`"],
303-
vec![],
304-
vec!["Matches dar", "Match dar description"]
305-
],
306-
line_docs
320+
321+
let mut expected = HashMap::new();
322+
expected.insert("foo".to_owned(), "Matches foo str, e.g.: `foo`".to_owned());
323+
expected.insert(
324+
"bar".to_owned(),
325+
"Matches bar str,\n Indent 2, e.g: `bar` or `foobar`".to_owned(),
326+
);
327+
expected.insert(
328+
"dar".to_owned(),
329+
"Matches dar\nMatch dar description".to_owned(),
307330
);
331+
assert_eq!(expected, line_docs);
308332
}
309333

310334
#[test]

grammars/src/grammars/json.pest

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
// modified, or distributed except according to those terms.
99

1010
//! A parser for JSON file.
11+
//!
1112
//! And this is an example of a JSON parser.
1213
json = { SOI ~ (object | array) ~ EOI }
1314

0 commit comments

Comments
 (0)