Skip to content
This repository was archived by the owner on Apr 8, 2024. It is now read-only.

Commit 9d1984d

Browse files
authored
Merge pull request #36 from CAD97/gll-bump
Bump GLL
2 parents b44fcfd + fd68201 commit 9d1984d

File tree

5 files changed

+165
-147
lines changed

5 files changed

+165
-147
lines changed

Cargo.toml

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ authors = ["The Rust Project Developers"]
55
edition = "2018"
66

77
[dependencies]
8-
gll = { git = "https://github.com/rust-lang-nursery/gll" }
8+
gll = "0.0.2"
99
proc-macro2 = "0.4.0"
1010
structopt = "0.2.12"
1111
walkdir = "2.2.6"
@@ -21,7 +21,7 @@ regex = "1.1.6"
2121
lazy_static = "1.3.0"
2222

2323
[build-dependencies]
24-
gll = { git = "https://github.com/rust-lang-nursery/gll" }
24+
gll = "0.0.2"
2525
walkdir = "2.2.6"
2626

2727
[[bin]]
@@ -31,3 +31,7 @@ path = "src/bin/coverage.rs"
3131
[[bin]]
3232
name = "snapshots"
3333
path = "src/bin/snapshots.rs"
34+
35+
[patch.'crates-io']
36+
gll = { git = "https://github.com/rust-lang/gll", rev = "bc72820905eac498c1fb1cc644e931d63e7d07e3" }
37+
grammer = { git = "https://github.com/lykenware/grammer", rev = "eb47b51a9332c0e82d7c02d988e203d2a01f3654" }

build.rs

Lines changed: 9 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ use walkdir::WalkDir;
77
fn main() {
88
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
99

10-
1110
// FIXME(eddyb) streamline this process in `gll`.
1211

1312
// Find all the `.lyg` grammar fragments in `grammar/`.
@@ -18,23 +17,9 @@ fn main() {
1817
.filter(|entry| entry.path().extension().map_or(false, |ext| ext == "lyg"));
1918

2019
// Start with the builtin rules for proc-macro grammars.
21-
let mut grammar = gll::proc_macro::builtin();
22-
23-
// HACK(eddyb) inject a custom builtin - this should be upstreamed to gll!
24-
{
25-
use gll::proc_macro::{FlatTokenPat, Pat};
26-
27-
grammar.define(
28-
"LIFETIME",
29-
gll::grammar::eat(Pat(vec![
30-
FlatTokenPat::Punct {
31-
ch: Some('\''),
32-
joint: Some(true),
33-
},
34-
FlatTokenPat::Ident(None),
35-
])),
36-
);
37-
}
20+
let mut cx = gll::proc_macro::Context::new();
21+
let cx = &mut cx;
22+
let mut grammar = gll::proc_macro::builtin(cx);
3823

3924
// Add in each grammar fragment to the grammar.
4025
for fragment in fragments {
@@ -44,10 +29,14 @@ fn main() {
4429
println!("cargo:rerun-if-changed={}", path.display());
4530

4631
let src = fs::read_to_string(&path).unwrap();
47-
let fragment: gll::grammar::Grammar<_> = src.parse().unwrap();
32+
let fragment = gll::parse_grammar(cx, src.parse().unwrap()).unwrap();
4833
grammar.extend(fragment);
4934
}
5035

5136
// Generate a Rust parser from the combined grammar and write it out.
52-
fs::write(&out_dir.join("parse.rs"), grammar.generate_rust()).unwrap();
37+
fs::write(
38+
&out_dir.join("parse.rs"),
39+
gll::generate::rust::generate(cx, &grammar).to_rustfmt_or_pretty_string(),
40+
)
41+
.unwrap();
5342
}

src/bin/coverage.rs

Lines changed: 118 additions & 110 deletions
Original file line numberDiff line numberDiff line change
@@ -2,17 +2,22 @@
22

33
use std::{
44
collections::{BTreeSet, VecDeque},
5-
fs, io, io::prelude::*,
5+
fs, io,
6+
io::prelude::*,
67
path::{Path, PathBuf},
78
time::{Duration, Instant},
89
};
9-
use gll::runtime::{MoreThanOne, ParseNodeKind, ParseNodeShape};
10+
11+
use derive_more::Add;
12+
use gll::{
13+
forest::{MoreThanOne, GrammarReflector},
14+
parse_node::ParseNodeShape,
15+
};
1016
use rayon::prelude::*;
1117
use rust_grammar::parse;
18+
use serde::{Deserialize, Serialize};
1219
use structopt::StructOpt;
1320
use walkdir::WalkDir;
14-
use derive_more::Add;
15-
use serde::{Serialize, Deserialize};
1621

1722
#[derive(Debug, Default, Serialize, Deserialize)]
1823
struct Blacklist {
@@ -67,36 +72,44 @@ enum Command {
6772
},
6873
}
6974

70-
type ModuleContentsResult<'a, 'i> = parse::ParseResult<
71-
'a,
72-
'i,
73-
proc_macro2::TokenStream,
74-
parse::ModuleContents<'a, 'i, proc_macro2::TokenStream>,
75-
>;
75+
type ModuleContentsResult = Result<ModuleContentsHandle, Error<proc_macro2::Span>>;
7676

77-
type ModuleContentsHandle<'a, 'i> = parse::Handle<
78-
'a,
79-
'i,
77+
type ModuleContentsHandle = parse::OwnedHandle<
8078
proc_macro2::TokenStream,
81-
parse::ModuleContents<'a, 'i, proc_macro2::TokenStream>,
79+
parse::ModuleContents<'static, 'static, proc_macro2::TokenStream>,
8280
>;
8381

82+
enum Error<A> {
83+
Lex(proc_macro2::LexError),
84+
Parse(gll::parser::ParseError<A>),
85+
}
86+
87+
impl<A> From<proc_macro2::LexError> for Error<A> {
88+
fn from(error: proc_macro2::LexError) -> Self {
89+
Error::Lex(error)
90+
}
91+
}
92+
93+
impl<A> From<gll::parser::ParseError<A>> for Error<A> {
94+
fn from(error: gll::parser::ParseError<A>) -> Self {
95+
Error::Parse(error)
96+
}
97+
}
98+
8499
/// Read the contents of the file at the given `path`, parse it
85100
/// using the `ModuleContents` rule, and pass the result to `f`.
86-
fn parse_file_with<R>(path: &Path, f: impl FnOnce(ModuleContentsResult<'_, '_>) -> R) -> R {
101+
fn parse_file(path: &Path) -> ModuleContentsResult {
87102
let src = fs::read_to_string(path).unwrap();
88-
match src.parse::<proc_macro2::TokenStream>() {
89-
Ok(tts) => parse::ModuleContents::parse_with(tts, |_, result| f(result)),
90-
// FIXME(eddyb) provide more information in this error case.
91-
Err(_) => f(Err(parse::ParseError::NoParse)),
92-
}
103+
let tts = src.parse::<proc_macro2::TokenStream>()?;
104+
let res = parse::ModuleContents::parse(tts)?;
105+
Ok(res)
93106
}
94107

95108
/// Output the result of a single file to stderr,
96109
/// optionally prefixed by a given `path`.
97110
fn report_file_result(
98111
path: Option<&Path>,
99-
result: ModuleContentsResult<'_, '_>,
112+
result: &ModuleContentsResult,
100113
ambiguity_result: Result<(), MoreThanOne>,
101114
duration: Option<Duration>,
102115
) {
@@ -111,60 +124,62 @@ fn report_file_result(
111124
match (result, ambiguity_result) {
112125
(Ok(_), Ok(_)) => eprintln!("OK"),
113126
(Ok(_), Err(_)) => eprintln!("OK (ambiguous)"),
114-
(Err(parse::ParseError::TooShort(handle)), _) => {
127+
(Err(Error::Parse(error)), _) => {
115128
eprint!("FAIL after ");
116129

117130
#[cfg(procmacro2_semver_exempt)]
118131
{
119132
// HACK(eddyb) work around `proc-macro2` `Span` printing limitation
120-
let end_location = handle.source_info().end.end();
133+
let end_location = error.at.end();
121134
eprintln!("{}:{}", end_location.line, end_location.column);
122135
}
123136
#[cfg(not(procmacro2_semver_exempt))]
124137
{
125-
let _ = handle;
126138
eprintln!(
127139
"(missing location information; \
128140
set `RUSTFLAGS='--cfg procmacro2_semver_exempt'`)"
129141
);
130142
}
143+
eprintln!("Expected: {:?}", error.expected);
131144
}
132-
(Err(parse::ParseError::NoParse), _) => eprintln!("FAIL (lexer error?)"),
145+
(Err(Error::Lex(e)), _) => eprintln!("FAIL ({:?})", e),
133146
}
134147
}
135148

136-
fn ambiguity_check(handle: ModuleContentsHandle<'_, '_>) -> Result<(), MoreThanOne> {
137-
let sppf = &handle.parser.sppf;
149+
fn ambiguity_check(handle: &ModuleContentsHandle) -> Result<(), MoreThanOne> {
150+
handle.with(|handle| {
151+
let forest = &handle.forest;
138152

139-
let mut queue = VecDeque::new();
140-
queue.push_back(handle.node);
141-
let mut seen: BTreeSet<_> = queue.iter().cloned().collect();
153+
let mut queue = VecDeque::new();
154+
queue.push_back(handle.node);
155+
let mut seen: BTreeSet<_> = queue.iter().cloned().collect();
142156

143-
while let Some(source) = queue.pop_front() {
144-
let mut add_children = |children: &[_]| {
145-
for &child in children {
146-
if seen.insert(child) {
147-
queue.push_back(child);
157+
while let Some(source) = queue.pop_front() {
158+
let mut add_children = |children: &[_]| {
159+
for &child in children {
160+
if seen.insert(child) {
161+
queue.push_back(child);
162+
}
148163
}
149-
}
150-
};
151-
match source.kind.shape() {
152-
ParseNodeShape::Opaque => {}
153-
ParseNodeShape::Alias(_) => add_children(&[source.unpack_alias()]),
154-
ParseNodeShape::Opt(_) => {
155-
if let Some(child) = source.unpack_opt() {
156-
add_children(&[child]);
164+
};
165+
match forest.grammar.parse_node_shape(source.kind) {
166+
ParseNodeShape::Opaque => {}
167+
ParseNodeShape::Alias(_) => add_children(&[forest.unpack_alias(source)]),
168+
ParseNodeShape::Opt(_) => {
169+
if let Some(child) = forest.unpack_opt(source) {
170+
add_children(&[child]);
171+
}
172+
}
173+
ParseNodeShape::Choice => add_children(&[forest.one_choice(source)?]),
174+
ParseNodeShape::Split(..) => {
175+
let (left, right) = forest.one_split(source)?;
176+
add_children(&[left, right])
157177
}
158-
}
159-
ParseNodeShape::Choice => add_children(&[sppf.one_choice(source)?]),
160-
ParseNodeShape::Split(..) => {
161-
let (left, right) = sppf.one_split(source)?;
162-
add_children(&[left, right])
163178
}
164179
}
165-
}
166180

167-
Ok(())
181+
Ok(())
182+
})
168183
}
169184

170185
#[derive(Debug, Default, Add)]
@@ -199,30 +214,29 @@ fn process(file: walkdir::DirEntry, verbose: bool) -> ParseResult {
199214
let mut stdout = io::stdout();
200215
let path = file.into_path();
201216

202-
parse_file_with(&path, |result| {
203-
let mut ambiguity_result = Ok(());
204-
let start = Instant::now();
205-
let status = match result {
206-
Ok(handle) => {
207-
ambiguity_result = ambiguity_check(handle);
208-
if ambiguity_result.is_ok() {
209-
ParseResult::Unambiguous
210-
} else {
211-
ParseResult::Ambiguous
212-
}
217+
let result = parse_file(&path);
218+
let mut ambiguity_result = Ok(());
219+
let start = Instant::now();
220+
let status = match &result {
221+
Ok(handle) => {
222+
ambiguity_result = ambiguity_check(handle);
223+
if ambiguity_result.is_ok() {
224+
ParseResult::Unambiguous
225+
} else {
226+
ParseResult::Ambiguous
213227
}
214-
Err(parse::ParseError::TooShort(_)) => ParseResult::Partial,
215-
Err(parse::ParseError::NoParse) => ParseResult::Error,
216-
};
217-
let duration = start.elapsed();
218-
if verbose {
219-
report_file_result(Some(&path), result, ambiguity_result, Some(duration));
220-
} else {
221-
print!("{}", status.compact_display());
222-
stdout.flush().unwrap();
223228
}
224-
status
225-
})
229+
Err(Error::Parse(_)) => ParseResult::Partial,
230+
Err(Error::Lex(_)) => ParseResult::Error,
231+
};
232+
let duration = start.elapsed();
233+
if verbose {
234+
report_file_result(Some(&path), &result, ambiguity_result, Some(duration));
235+
} else {
236+
print!("{}", status.compact_display());
237+
stdout.flush().unwrap();
238+
}
239+
status
226240
}
227241

228242
fn print_statistics(counters: Counters) {
@@ -253,24 +267,21 @@ fn main() -> Result<(), failure::Error> {
253267
file,
254268
} => {
255269
// Not much to do, try to parse the file and report the result.
256-
parse_file_with(&file, |result| {
257-
let mut ambiguity_result = Ok(());
258-
match result {
259-
Ok(handle) | Err(parse::ParseError::TooShort(handle)) => {
260-
ambiguity_result = ambiguity_check(handle);
261-
262-
if let Some(out_path) = graphviz_forest {
263-
handle
264-
.parser
265-
.sppf
266-
.dump_graphviz(&mut fs::File::create(out_path).unwrap())
267-
.unwrap();
268-
}
269-
}
270-
Err(parse::ParseError::NoParse) => {}
270+
let result = parse_file(&file);
271+
let mut ambiguity_result = Ok(());
272+
if let Ok(handle) = &result {
273+
ambiguity_result = ambiguity_check(handle);
274+
275+
if let Some(out_path) = graphviz_forest {
276+
handle.with(|handle| {
277+
handle
278+
.forest
279+
.dump_graphviz(&mut fs::File::create(out_path).unwrap())
280+
.unwrap();
281+
})
271282
}
272-
report_file_result(None, result, ambiguity_result, None);
273-
});
283+
}
284+
report_file_result(None, &result, ambiguity_result, None);
274285
}
275286
Command::Dir { verbose, dir } => {
276287
let config = Config::load()?;
@@ -288,27 +299,24 @@ fn main() -> Result<(), failure::Error> {
288299
let counters: Counters = files
289300
.par_bridge()
290301
.map(|f| process(f, verbose))
291-
.fold(
292-
Counters::default,
293-
|mut acc, x| {
294-
acc.total_count += 1;
295-
match x {
296-
ParseResult::Ambiguous => {
297-
acc.ambiguous_count += 1;
298-
}
299-
ParseResult::Unambiguous => {
300-
acc.unambiguous_count += 1;
301-
}
302-
ParseResult::Partial => {
303-
acc.too_short_count += 1;
304-
}
305-
ParseResult::Error => {
306-
acc.no_parse_count += 1;
307-
}
308-
};
309-
acc
310-
},
311-
)
302+
.fold(Counters::default, |mut acc, x| {
303+
acc.total_count += 1;
304+
match x {
305+
ParseResult::Ambiguous => {
306+
acc.ambiguous_count += 1;
307+
}
308+
ParseResult::Unambiguous => {
309+
acc.unambiguous_count += 1;
310+
}
311+
ParseResult::Partial => {
312+
acc.too_short_count += 1;
313+
}
314+
ParseResult::Error => {
315+
acc.no_parse_count += 1;
316+
}
317+
};
318+
acc
319+
})
312320
.reduce(Counters::default, |a, b| a + b);
313321

314322
// We're done, time to print out stats!

0 commit comments

Comments (0)