Commit d708d02 (1 parent: 75af15e)

proc_macro: introduce frontend bridge and use it for TokenStream and its builders & cursors.

11 files changed (+973 / -339 lines)

src/libproc_macro/bridge.rs

Lines changed: 547 additions & 0 deletions
Large diffs are not rendered by default.
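
bridge.rs itself is collapsed in this view, but the impl ::bridge::FrontendInterface for Rustc in src/libproc_macro/rustc.rs below pins down the shape of its central trait. The sketch that follows is reconstructed from that impl, not taken from the unrendered diff, so the real trait in bridge.rs may carry extra bounds, methods, or a different split; LexError, Span, Delimiter and TokenTree are libproc_macro's own public types.

// Sketch inferred from the Rustc impl below; signatures in the real bridge.rs may differ.
pub trait FrontendInterface {
    type TokenStream;
    type TokenStreamBuilder;
    type TokenCursor;

    fn token_stream_empty(&self) -> Self::TokenStream;
    fn token_stream_is_empty(&self, stream: &Self::TokenStream) -> bool;
    fn token_stream_from_str(&self, src: &str) -> Result<Self::TokenStream, LexError>;
    fn token_stream_delimited(&self, span: Span, delimiter: Delimiter,
                              delimed: Self::TokenStream) -> Self::TokenStream;
    fn token_stream_from_token_tree(&self, tree: TokenTree) -> Self::TokenStream;
    fn token_stream_to_token_tree(&self, stream: Self::TokenStream)
        -> Result<(TokenTree, Option<Self::TokenStream>),
                  (Span, (Delimiter, Self::TokenStream))>;
    fn token_stream_trees(&self, stream: Self::TokenStream) -> Self::TokenCursor;

    fn token_stream_builder_new(&self) -> Self::TokenStreamBuilder;
    fn token_stream_builder_push(&self, builder: &mut Self::TokenStreamBuilder,
                                 stream: Self::TokenStream);
    fn token_stream_builder_build(&self, builder: Self::TokenStreamBuilder) -> Self::TokenStream;

    fn token_cursor_next(&self, cursor: &mut Self::TokenCursor) -> Option<Self::TokenStream>;
}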

src/libproc_macro/lib.rs

Lines changed: 54 additions & 265 deletions
Large diffs are not rendered by default.

src/libproc_macro/quote.rs

Lines changed: 0 additions & 16 deletions
@@ -16,11 +16,7 @@
 
 use {Delimiter, Literal, Spacing, Span, Term, TokenNode, TokenStream, TokenTree};
 
-use syntax::ext::base::{ExtCtxt, ProcMacro};
 use syntax::parse::token;
-use syntax::tokenstream;
-
-pub struct Quoter;
 
 pub fn unquote<T: Into<TokenStream> + Clone>(tokens: &T) -> TokenStream {
     T::into(tokens.clone())
@@ -67,18 +63,6 @@ macro_rules! quote {
     };
 }
 
-impl ProcMacro for Quoter {
-    fn expand<'cx>(&self, cx: &'cx mut ExtCtxt,
-                   _: ::syntax_pos::Span,
-                   stream: tokenstream::TokenStream)
-                   -> tokenstream::TokenStream {
-        let mut info = cx.current_expansion.mark.expn_info().unwrap();
-        info.callee.allow_internal_unstable = true;
-        cx.current_expansion.mark.set_expn_info(info);
-        ::__internal::set_sess(cx, || TokenStream(stream).quote().0)
-    }
-}
-
 impl<T: Quote> Quote for Option<T> {
     fn quote(self) -> TokenStream {
         match self {

src/libproc_macro/rustc.rs

Lines changed: 282 additions & 0 deletions
@@ -0,0 +1,282 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use {__internal, Delimiter, Spacing, Span, Term, TokenNode, TokenTree};

use syntax_pos::{self, DUMMY_SP, SyntaxContext, FileName};
use syntax_pos::hygiene::Mark;
use syntax::ast;
use syntax::ext::base::{ExtCtxt, ProcMacro};
use syntax::parse::{self, token};
use syntax::tokenstream;

pub struct Quoter;

impl ProcMacro for Quoter {
    fn expand<'cx>(&self, cx: &'cx mut ExtCtxt,
                   _: syntax_pos::Span,
                   stream: tokenstream::TokenStream)
                   -> tokenstream::TokenStream {
        let expand_quoter = ::bridge::Expand1::new(&::quote::Quote::quote);

        let mut info = cx.current_expansion.mark.expn_info().unwrap();
        info.callee.allow_internal_unstable = true;
        cx.current_expansion.mark.set_expn_info(info);
        __internal::set_sess(cx, || expand_quoter.run(Rustc, stream))
    }
}

impl Delimiter {
    fn from_internal(delim: token::DelimToken) -> Delimiter {
        match delim {
            token::Paren => Delimiter::Parenthesis,
            token::Brace => Delimiter::Brace,
            token::Bracket => Delimiter::Bracket,
            token::NoDelim => Delimiter::None,
        }
    }

    fn to_internal(self) -> token::DelimToken {
        match self {
            Delimiter::Parenthesis => token::Paren,
            Delimiter::Brace => token::Brace,
            Delimiter::Bracket => token::Bracket,
            Delimiter::None => token::NoDelim,
        }
    }
}

pub struct Rustc;
impl ::bridge::FrontendInterface for Rustc {
    type TokenStream = tokenstream::TokenStream;
    type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
    type TokenCursor = tokenstream::Cursor;

    fn token_stream_empty(&self) -> Self::TokenStream {
        tokenstream::TokenStream::empty()
    }
    fn token_stream_is_empty(&self, stream: &Self::TokenStream) -> bool {
        stream.is_empty()
    }
    fn token_stream_from_str(&self, src: &str) -> Result<Self::TokenStream, ::LexError> {
        ::__internal::with_sess(|(sess, mark)| {
            let src = src.to_string();
            let name = FileName::ProcMacroSourceCode;
            let expn_info = mark.expn_info().unwrap();
            let call_site = expn_info.call_site;
            // notify the expansion info that it is unhygienic
            let mark = Mark::fresh(mark);
            mark.set_expn_info(expn_info);
            let span = call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark));
            Ok(parse::parse_stream_from_source_str(name, src, sess, Some(span)))
        })
    }
    fn token_stream_delimited(&self, span: ::Span,
                              delimiter: ::Delimiter,
                              delimed: Self::TokenStream)
                              -> Self::TokenStream {
        tokenstream::TokenTree::Delimited(span.0, tokenstream::Delimited {
            delim: delimiter.to_internal(),
            tts: delimed.into(),
        }).into()
    }
    fn token_stream_from_token_tree(&self, tree: TokenTree) -> Self::TokenStream {
        use syntax::parse::token::*;
        use syntax::tokenstream::TokenTree;

        let (op, kind) = match tree.kind {
            TokenNode::Op(op, kind) => (op, kind),
            TokenNode::Group(..) => unreachable!(),
            TokenNode::Term(symbol) => {
                let ident = ast::Ident { name: symbol.0, ctxt: tree.span.0.ctxt() };
                let token = if symbol.0.as_str().starts_with("'") {
                    Lifetime(ident)
                } else {
                    Ident(ident)
                };
                return TokenTree::Token(tree.span.0, token).into();
            }
            TokenNode::Literal(token) => return TokenTree::Token(tree.span.0, token.0).into(),
        };

        let token = match op {
            '=' => Eq,
            '<' => Lt,
            '>' => Gt,
            '!' => Not,
            '~' => Tilde,
            '+' => BinOp(Plus),
            '-' => BinOp(Minus),
            '*' => BinOp(Star),
            '/' => BinOp(Slash),
            '%' => BinOp(Percent),
            '^' => BinOp(Caret),
            '&' => BinOp(And),
            '|' => BinOp(Or),
            '@' => At,
            '.' => Dot,
            ',' => Comma,
            ';' => Semi,
            ':' => Colon,
            '#' => Pound,
            '$' => Dollar,
            '?' => Question,
            _ => panic!("unsupported character {}", op),
        };

        let tree = TokenTree::Token(tree.span.0, token);
        match kind {
            Spacing::Alone => tree.into(),
            Spacing::Joint => tree.joint(),
        }
    }
    fn token_stream_to_token_tree(&self, stream: Self::TokenStream)
                                  -> Result<(::TokenTree, Option<Self::TokenStream>),
                                            (::Span, (::Delimiter, Self::TokenStream))> {
        use syntax::parse::token::*;

        let mut next = None;

        let (tree, is_joint) = stream.as_tree();
        let (mut span, token) = match tree {
            tokenstream::TokenTree::Delimited(span, delimed) => {
                let delimiter = Delimiter::from_internal(delimed.delim);
                return Err((Span(span), (delimiter, delimed.tts.into())));
            }
            tokenstream::TokenTree::Token(span, token) => (span, token),
        };

        let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone };
        macro_rules! op {
            ($op:expr) => { TokenNode::Op($op, op_kind) }
        }

        macro_rules! joint {
            ($first:expr, $rest:expr) => {
                joint($first, $rest, is_joint, &mut span, &mut next)
            }
        }

        fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span,
                 next: &mut Option<tokenstream::TokenStream>)
                 -> TokenNode {
            let (first_span, rest_span) = (*span, *span);
            *span = first_span;
            let tree = tokenstream::TokenTree::Token(rest_span, rest);
            *next = Some(if is_joint { tree.joint() } else { tree.into() });
            TokenNode::Op(first, Spacing::Joint)
        }

        let kind = match token {
            Eq => op!('='),
            Lt => op!('<'),
            Le => joint!('<', Eq),
            EqEq => joint!('=', Eq),
            Ne => joint!('!', Eq),
            Ge => joint!('>', Eq),
            Gt => op!('>'),
            AndAnd => joint!('&', BinOp(And)),
            OrOr => joint!('|', BinOp(Or)),
            Not => op!('!'),
            Tilde => op!('~'),
            BinOp(Plus) => op!('+'),
            BinOp(Minus) => op!('-'),
            BinOp(Star) => op!('*'),
            BinOp(Slash) => op!('/'),
            BinOp(Percent) => op!('%'),
            BinOp(Caret) => op!('^'),
            BinOp(And) => op!('&'),
            BinOp(Or) => op!('|'),
            BinOp(Shl) => joint!('<', Lt),
            BinOp(Shr) => joint!('>', Gt),
            BinOpEq(Plus) => joint!('+', Eq),
            BinOpEq(Minus) => joint!('-', Eq),
            BinOpEq(Star) => joint!('*', Eq),
            BinOpEq(Slash) => joint!('/', Eq),
            BinOpEq(Percent) => joint!('%', Eq),
            BinOpEq(Caret) => joint!('^', Eq),
            BinOpEq(And) => joint!('&', Eq),
            BinOpEq(Or) => joint!('|', Eq),
            BinOpEq(Shl) => joint!('<', Le),
            BinOpEq(Shr) => joint!('>', Ge),
            At => op!('@'),
            Dot => op!('.'),
            DotDot => joint!('.', Dot),
            DotDotDot => joint!('.', DotDot),
            DotDotEq => joint!('.', DotEq),
            Comma => op!(','),
            Semi => op!(';'),
            Colon => op!(':'),
            ModSep => joint!(':', Colon),
            RArrow => joint!('-', Gt),
            LArrow => joint!('<', BinOp(Minus)),
            FatArrow => joint!('=', Gt),
            Pound => op!('#'),
            Dollar => op!('$'),
            Question => op!('?'),

            Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
            Literal(..) | DocComment(..) => TokenNode::Literal(::Literal(token)),

            Interpolated(_) => {
                return Err((Span(span), __internal::with_sess(|(sess, _)| {
                    let tts = token.interpolated_to_tokenstream(sess, span);
                    (Delimiter::None, tts)
                })));
            }

            DotEq => joint!('.', Eq),
            OpenDelim(..) | CloseDelim(..) => unreachable!(),
            Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
        };

        Ok((TokenTree { span: Span(span), kind: kind }, next))
    }
    fn token_stream_trees(&self, stream: Self::TokenStream) -> Self::TokenCursor {
        stream.trees()
    }

    fn token_stream_builder_new(&self) -> Self::TokenStreamBuilder {
        tokenstream::TokenStreamBuilder::new()
    }
    fn token_stream_builder_push(&self, builder: &mut Self::TokenStreamBuilder,
                                 stream: Self::TokenStream) {
        builder.push(stream);
    }
    fn token_stream_builder_build(&self, builder: Self::TokenStreamBuilder)
                                  -> Self::TokenStream {
        builder.build()
    }

    fn token_cursor_next(&self, cursor: &mut Self::TokenCursor) -> Option<Self::TokenStream> {
        while let Some(stream) = cursor.next_as_stream() {
            let (tree, _) = stream.clone().as_tree();
            let span = tree.span();
            if span != DUMMY_SP {
                return Some(stream);
            }
            let nested_stream = match tree {
                tokenstream::TokenTree::Delimited(_, tokenstream::Delimited {
                    delim: token::NoDelim,
                    tts
                }) => tts.into(),
                tokenstream::TokenTree::Token(_, token @ token::Interpolated(_)) => {
                    __internal::with_sess(|(sess, _)| {
                        token.interpolated_to_tokenstream(sess, span)
                    })
                }
                _ => return Some(stream)
            };
            cursor.insert(nested_stream);
        }
        None
    }
}
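
Taken together, the frontend methods above form the parse / iterate / rebuild cycle the public TokenStream API needs. The usage sketch below is not part of the commit; it drives the Rustc frontend directly and assumes it runs inside __internal::set_sess (as Quoter::expand arranges before calling run), so with_sess can find a parse session.

// Illustrative only: round-trip a stream through the frontend's cursor and builder.
// Assumes a parse session is already installed via `__internal::set_sess`.
fn roundtrip_through_frontend() {
    use bridge::FrontendInterface;

    let rustc = Rustc;
    // Lex a fresh stream from source text.
    let stream = match rustc.token_stream_from_str("a + b") {
        Ok(stream) => stream,
        Err(_) => return,
    };

    // Walk the stream tree by tree and push each piece into a builder.
    let mut builder = rustc.token_stream_builder_new();
    let mut cursor = rustc.token_stream_trees(stream);
    while let Some(tree_stream) = rustc.token_cursor_next(&mut cursor) {
        rustc.token_stream_builder_push(&mut builder, tree_stream);
    }

    // The rebuilt stream should still contain the three tokens we lexed.
    let rebuilt = rustc.token_stream_builder_build(builder);
    assert!(!rustc.token_stream_is_empty(&rebuilt));
}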

src/librustc_metadata/creader.rs

Lines changed: 4 additions & 5 deletions
@@ -514,8 +514,7 @@ impl<'a> CrateLoader<'a> {
     fn load_derive_macros(&mut self, root: &CrateRoot, dylib: Option<PathBuf>, span: Span)
                           -> Vec<(ast::Name, Lrc<SyntaxExtension>)> {
         use std::{env, mem};
-        use proc_macro::TokenStream;
-        use proc_macro::__internal::Registry;
+        use proc_macro::bridge::{Registry, Expand1, Expand2};
         use dynamic_lib::DynamicLibrary;
         use syntax_ext::deriving::custom::ProcMacroDerive;
         use syntax_ext::proc_macro_impl::{AttrProcMacro, BangProcMacro};
@@ -545,7 +544,7 @@ impl<'a> CrateLoader<'a> {
         impl Registry for MyRegistrar {
             fn register_custom_derive(&mut self,
                                       trait_name: &str,
-                                      expand: fn(TokenStream) -> TokenStream,
+                                      expand: Expand1,
                                       attributes: &[&'static str]) {
                 let attrs = attributes.iter().cloned().map(Symbol::intern).collect::<Vec<_>>();
                 let derive = ProcMacroDerive::new(expand, attrs.clone());
@@ -555,7 +554,7 @@ impl<'a> CrateLoader<'a> {
 
             fn register_attr_proc_macro(&mut self,
                                         name: &str,
-                                        expand: fn(TokenStream, TokenStream) -> TokenStream) {
+                                        expand: Expand2) {
                 let expand = SyntaxExtension::AttrProcMacro(
                     Box::new(AttrProcMacro { inner: expand })
                 );
@@ -564,7 +563,7 @@ impl<'a> CrateLoader<'a> {
 
             fn register_bang_proc_macro(&mut self,
                                         name: &str,
-                                        expand: fn(TokenStream) -> TokenStream) {
+                                        expand: Expand1) {
                 let expand = SyntaxExtension::ProcMacro(
                     Box::new(BangProcMacro { inner: expand })
                 );
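
On the registrar side, the visible change is that expansion entry points now arrive wrapped in bridge::Expand1/Expand2 rather than as bare fn pointers. The sketch below is illustrative only: the Registry method signatures are taken from this hunk and the Expand1::new(&...) call shape mirrors Quoter::expand in rustc.rs, but my_derive and "MyDerive" are made-up names and the exact parameter type of Expand1::new is not shown in this diff.

// Illustrative only: registering a derive through the new bridge::Expand1 wrapper.
fn register_with<R: proc_macro::bridge::Registry>(registrar: &mut R) {
    // A no-op derive body, just to have something to wrap; hypothetical, not from the commit.
    fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
        input
    }

    registrar.register_custom_derive(
        "MyDerive",
        proc_macro::bridge::Expand1::new(&my_derive),
        &[],
    );
}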

src/librustc_metadata/cstore_impl.rs

Lines changed: 1 addition & 1 deletion
@@ -502,7 +502,7 @@ impl CrateStore for cstore::CStore {
             return LoadedMacro::ProcMacro(proc_macros[id.index.to_proc_macro_index()].1.clone());
         } else if data.name == "proc_macro" &&
                   self.get_crate_data(id.krate).item_name(id.index) == "quote" {
-            let ext = SyntaxExtension::ProcMacro(Box::new(::proc_macro::__internal::Quoter));
+            let ext = SyntaxExtension::ProcMacro(Box::new(::proc_macro::rustc::Quoter));
             return LoadedMacro::ProcMacro(Lrc::new(ext));
         }
 
