|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

| 11 | +use {__internal, Delimiter, Spacing, Span, Term, TokenNode, TokenTree}; |
| 12 | + |
| 13 | +use syntax_pos::{self, SyntaxContext, FileName}; |
| 14 | +use syntax_pos::hygiene::Mark; |
| 15 | +use syntax::ast; |
| 16 | +use syntax::ext::base::{ExtCtxt, ProcMacro}; |
| 17 | +use syntax::parse::{self, token}; |
| 18 | +use syntax::tokenstream; |
| 19 | + |
/// `ProcMacro` expander for quasi-quoting: bridges the compiler's expansion
/// machinery to this crate's `Quote::quote` implementation (see the
/// `impl ProcMacro for Quoter` below).
pub struct Quoter;
| 21 | + |
impl ProcMacro for Quoter {
    /// Expands a quasi-quote invocation: runs the `Quote::quote` bridge
    /// expander over the input `stream` with this crate's session installed.
    fn expand<'cx>(&self, cx: &'cx mut ExtCtxt,
                   _: syntax_pos::Span,
                   stream: tokenstream::TokenStream)
                   -> tokenstream::TokenStream {
        let expand_quoter = ::bridge::Expand1::new(&::quote::Quote::quote);

        // Mark this expansion as allowed to use internal-unstable features:
        // the quoted output references unstable `proc_macro` internals.
        // Expansion info is fetched, mutated, and written back because the
        // mark only exposes get/set accessors.
        let mut info = cx.current_expansion.mark.expn_info().unwrap();
        info.callee.allow_internal_unstable = true;
        cx.current_expansion.mark.set_expn_info(info);
        // Make the parse session visible to the bridge (`with_sess` callers)
        // for the duration of the expansion, then run the quoter.
        __internal::set_sess(cx, || expand_quoter.run(Rustc, stream))
    }
}
| 35 | + |
| 36 | +impl Delimiter { |
| 37 | + fn from_internal(delim: token::DelimToken) -> Delimiter { |
| 38 | + match delim { |
| 39 | + token::Paren => Delimiter::Parenthesis, |
| 40 | + token::Brace => Delimiter::Brace, |
| 41 | + token::Bracket => Delimiter::Bracket, |
| 42 | + token::NoDelim => Delimiter::None, |
| 43 | + } |
| 44 | + } |
| 45 | + |
| 46 | + fn to_internal(self) -> token::DelimToken { |
| 47 | + match self { |
| 48 | + Delimiter::Parenthesis => token::Paren, |
| 49 | + Delimiter::Brace => token::Brace, |
| 50 | + Delimiter::Bracket => token::Bracket, |
| 51 | + Delimiter::None => token::NoDelim, |
| 52 | + } |
| 53 | + } |
| 54 | +} |
| 55 | + |
| 56 | +pub struct Rustc; |
| 57 | +impl ::bridge::FrontendInterface for Rustc { |
| 58 | + type TokenStream = tokenstream::TokenStream; |
| 59 | + type TokenStreamBuilder = tokenstream::TokenStreamBuilder; |
| 60 | + type TokenCursor = tokenstream::Cursor; |
| 61 | + |
| 62 | + |
| 63 | + fn token_stream_empty(&self) -> Self::TokenStream { |
| 64 | + tokenstream::TokenStream::empty() |
| 65 | + } |
| 66 | + fn token_stream_is_empty(&self, stream: &Self::TokenStream) -> bool { |
| 67 | + stream.is_empty() |
| 68 | + } |
| 69 | + fn token_stream_from_str(&self, src: &str) -> Result<Self::TokenStream, ::LexError> { |
| 70 | + ::__internal::with_sess(|(sess, mark)| { |
| 71 | + let src = src.to_string(); |
| 72 | + let name = FileName::ProcMacroSourceCode; |
| 73 | + let expn_info = mark.expn_info().unwrap(); |
| 74 | + let call_site = expn_info.call_site; |
| 75 | + // notify the expansion info that it is unhygienic |
| 76 | + let mark = Mark::fresh(mark); |
| 77 | + mark.set_expn_info(expn_info); |
| 78 | + let span = call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark)); |
| 79 | + Ok(parse::parse_stream_from_source_str(name, src, sess, Some(span))) |
| 80 | + }) |
| 81 | + } |
| 82 | + fn token_stream_delimited(&self, span: ::Span, |
| 83 | + delimiter: ::Delimiter, |
| 84 | + delimed: Self::TokenStream) |
| 85 | + -> Self::TokenStream { |
| 86 | + tokenstream::TokenTree::Delimited(span.0, tokenstream::Delimited { |
| 87 | + delim: delimiter.to_internal(), |
| 88 | + tts: delimed.into(), |
| 89 | + }).into() |
| 90 | + } |
| 91 | + fn token_stream_from_token_tree(&self, tree: TokenTree) -> Self::TokenStream { |
| 92 | + use syntax::parse::token::*; |
| 93 | + use syntax::tokenstream::TokenTree; |
| 94 | + |
| 95 | + let (op, kind) = match tree.kind { |
| 96 | + TokenNode::Op(op, kind) => (op, kind), |
| 97 | + TokenNode::Group(..) => unreachable!(), |
| 98 | + TokenNode::Term(symbol) => { |
| 99 | + let ident = ast::Ident { name: symbol.0, ctxt: tree.span.0.ctxt() }; |
| 100 | + let token = if symbol.0.as_str().starts_with("'") { |
| 101 | + Lifetime(ident) |
| 102 | + } else { |
| 103 | + Ident(ident) |
| 104 | + }; |
| 105 | + return TokenTree::Token(tree.span.0, token).into(); |
| 106 | + } |
| 107 | + TokenNode::Literal(token) => return TokenTree::Token(tree.span.0, token.0).into(), |
| 108 | + }; |
| 109 | + |
| 110 | + let token = match op { |
| 111 | + '=' => Eq, |
| 112 | + '<' => Lt, |
| 113 | + '>' => Gt, |
| 114 | + '!' => Not, |
| 115 | + '~' => Tilde, |
| 116 | + '+' => BinOp(Plus), |
| 117 | + '-' => BinOp(Minus), |
| 118 | + '*' => BinOp(Star), |
| 119 | + '/' => BinOp(Slash), |
| 120 | + '%' => BinOp(Percent), |
| 121 | + '^' => BinOp(Caret), |
| 122 | + '&' => BinOp(And), |
| 123 | + '|' => BinOp(Or), |
| 124 | + '@' => At, |
| 125 | + '.' => Dot, |
| 126 | + ',' => Comma, |
| 127 | + ';' => Semi, |
| 128 | + ':' => Colon, |
| 129 | + '#' => Pound, |
| 130 | + '$' => Dollar, |
| 131 | + '?' => Question, |
| 132 | + _ => panic!("unsupported character {}", op), |
| 133 | + }; |
| 134 | + |
| 135 | + let tree = TokenTree::Token(tree.span.0, token); |
| 136 | + match kind { |
| 137 | + Spacing::Alone => tree.into(), |
| 138 | + Spacing::Joint => tree.joint(), |
| 139 | + } |
| 140 | + } |
| 141 | + fn token_stream_to_token_tree(&self, stream: Self::TokenStream) |
| 142 | + -> Result<(::TokenTree, Option<Self::TokenStream>), |
| 143 | + (::Span, (::Delimiter, Self::TokenStream))> { |
| 144 | + use syntax::parse::token::*; |
| 145 | + |
| 146 | + let mut next = None; |
| 147 | + |
| 148 | + let (tree, is_joint) = stream.as_tree(); |
| 149 | + let (mut span, token) = match tree { |
| 150 | + tokenstream::TokenTree::Delimited(span, delimed) => { |
| 151 | + let delimiter = Delimiter::from_internal(delimed.delim); |
| 152 | + return Err((Span(span), (delimiter, delimed.tts.into()))); |
| 153 | + } |
| 154 | + tokenstream::TokenTree::Token(span, token) => (span, token), |
| 155 | + }; |
| 156 | + |
| 157 | + let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone }; |
| 158 | + macro_rules! op { |
| 159 | + ($op:expr) => { TokenNode::Op($op, op_kind) } |
| 160 | + } |
| 161 | + |
| 162 | + macro_rules! joint { |
| 163 | + ($first:expr, $rest:expr) => { |
| 164 | + joint($first, $rest, is_joint, &mut span, &mut next) |
| 165 | + } |
| 166 | + } |
| 167 | + |
| 168 | + fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span, |
| 169 | + next: &mut Option<tokenstream::TokenStream>) |
| 170 | + -> TokenNode { |
| 171 | + let (first_span, rest_span) = (*span, *span); |
| 172 | + *span = first_span; |
| 173 | + let tree = tokenstream::TokenTree::Token(rest_span, rest); |
| 174 | + *next = Some(if is_joint { tree.joint() } else { tree.into() }); |
| 175 | + TokenNode::Op(first, Spacing::Joint) |
| 176 | + } |
| 177 | + |
| 178 | + let kind = match token { |
| 179 | + Eq => op!('='), |
| 180 | + Lt => op!('<'), |
| 181 | + Le => joint!('<', Eq), |
| 182 | + EqEq => joint!('=', Eq), |
| 183 | + Ne => joint!('!', Eq), |
| 184 | + Ge => joint!('>', Eq), |
| 185 | + Gt => op!('>'), |
| 186 | + AndAnd => joint!('&', BinOp(And)), |
| 187 | + OrOr => joint!('|', BinOp(Or)), |
| 188 | + Not => op!('!'), |
| 189 | + Tilde => op!('~'), |
| 190 | + BinOp(Plus) => op!('+'), |
| 191 | + BinOp(Minus) => op!('-'), |
| 192 | + BinOp(Star) => op!('*'), |
| 193 | + BinOp(Slash) => op!('/'), |
| 194 | + BinOp(Percent) => op!('%'), |
| 195 | + BinOp(Caret) => op!('^'), |
| 196 | + BinOp(And) => op!('&'), |
| 197 | + BinOp(Or) => op!('|'), |
| 198 | + BinOp(Shl) => joint!('<', Lt), |
| 199 | + BinOp(Shr) => joint!('>', Gt), |
| 200 | + BinOpEq(Plus) => joint!('+', Eq), |
| 201 | + BinOpEq(Minus) => joint!('-', Eq), |
| 202 | + BinOpEq(Star) => joint!('*', Eq), |
| 203 | + BinOpEq(Slash) => joint!('/', Eq), |
| 204 | + BinOpEq(Percent) => joint!('%', Eq), |
| 205 | + BinOpEq(Caret) => joint!('^', Eq), |
| 206 | + BinOpEq(And) => joint!('&', Eq), |
| 207 | + BinOpEq(Or) => joint!('|', Eq), |
| 208 | + BinOpEq(Shl) => joint!('<', Le), |
| 209 | + BinOpEq(Shr) => joint!('>', Ge), |
| 210 | + At => op!('@'), |
| 211 | + Dot => op!('.'), |
| 212 | + DotDot => joint!('.', Dot), |
| 213 | + DotDotDot => joint!('.', DotDot), |
| 214 | + DotDotEq => joint!('.', DotEq), |
| 215 | + Comma => op!(','), |
| 216 | + Semi => op!(';'), |
| 217 | + Colon => op!(':'), |
| 218 | + ModSep => joint!(':', Colon), |
| 219 | + RArrow => joint!('-', Gt), |
| 220 | + LArrow => joint!('<', BinOp(Minus)), |
| 221 | + FatArrow => joint!('=', Gt), |
| 222 | + Pound => op!('#'), |
| 223 | + Dollar => op!('$'), |
| 224 | + Question => op!('?'), |
| 225 | + |
| 226 | + Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)), |
| 227 | + Literal(..) | DocComment(..) => TokenNode::Literal(::Literal(token)), |
| 228 | + |
| 229 | + Interpolated(_) => { |
| 230 | + return Err((Span(span), __internal::with_sess(|(sess, _)| { |
| 231 | + let tts = token.interpolated_to_tokenstream(sess, span); |
| 232 | + (Delimiter::None, tts) |
| 233 | + }))); |
| 234 | + } |
| 235 | + |
| 236 | + DotEq => joint!('.', Eq), |
| 237 | + OpenDelim(..) | CloseDelim(..) => unreachable!(), |
| 238 | + Whitespace | Comment | Shebang(..) | Eof => unreachable!(), |
| 239 | + }; |
| 240 | + |
| 241 | + Ok((TokenTree { span: Span(span), kind: kind }, next)) |
| 242 | + } |
| 243 | + fn token_stream_trees(&self, stream: Self::TokenStream) -> Self::TokenCursor { |
| 244 | + stream.trees() |
| 245 | + } |
| 246 | + |
| 247 | + fn token_stream_builder_new(&self) -> Self::TokenStreamBuilder { |
| 248 | + tokenstream::TokenStreamBuilder::new() |
| 249 | + } |
| 250 | + fn token_stream_builder_push(&self, builder: &mut Self::TokenStreamBuilder, |
| 251 | + stream: Self::TokenStream) { |
| 252 | + builder.push(stream); |
| 253 | + } |
| 254 | + fn token_stream_builder_build(&self, builder: Self::TokenStreamBuilder) |
| 255 | + -> Self::TokenStream { |
| 256 | + builder.build() |
| 257 | + } |
| 258 | + |
| 259 | + fn token_cursor_next(&self, cursor: &mut Self::TokenCursor) -> Option<Self::TokenStream> { |
| 260 | + while let Some(stream) = cursor.next_as_stream() { |
| 261 | + let (tree, _) = stream.clone().as_tree(); |
| 262 | + let span = tree.span(); |
| 263 | + if span != DUMMY_SP { |
| 264 | + return Some(stream); |
| 265 | + } |
| 266 | + let nested_stream = match tree { |
| 267 | + tokenstream::TokenTree::Delimited(_, tokenstream::Delimited { |
| 268 | + delim: token::NoDelim, |
| 269 | + tts |
| 270 | + }) => tts.into(), |
| 271 | + tokenstream::TokenTree::Token(_, token @ token::Interpolated(_)) => { |
| 272 | + __internal::with_sess(|(sess, _)| { |
| 273 | + token.interpolated_to_tokenstream(sess, span) |
| 274 | + }) |
| 275 | + } |
| 276 | + _ => return Some(stream) |
| 277 | + }; |
| 278 | + cursor.insert(nested_stream); |
| 279 | + } |
| 280 | + None |
| 281 | + } |
| 282 | +} |