Skip to content
This repository was archived by the owner on May 28, 2025. It is now read-only.

Commit e009cdc

Browse files
Move token_stream to separate module
1 parent 315b0a7 commit e009cdc

File tree

2 files changed: +188 additions, −224 deletions

crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs

Lines changed: 6 additions & 224 deletions
Original file line numberDiff line numberDiff line change
@@ -10,11 +10,14 @@
1010
1111
use super::proc_macro::bridge::{self, server};
1212

13+
mod token_stream;
14+
pub use token_stream::*;
15+
16+
use std::ascii;
1317
use std::collections::HashMap;
1418
use std::hash::Hash;
1519
use std::iter::FromIterator;
1620
use std::ops::Bound;
17-
use std::{ascii, vec::IntoIter};
1821

1922
type Group = tt::Subtree;
2023
type TokenTree = tt::TokenTree;
@@ -23,80 +26,6 @@ type Spacing = tt::Spacing;
2326
type Literal = tt::Literal;
2427
type Span = tt::TokenId;
2528

26-
#[derive(Debug, Default, Clone)]
27-
pub struct TokenStream {
28-
pub token_trees: Vec<TokenTree>,
29-
}
30-
31-
impl TokenStream {
32-
pub fn new() -> Self {
33-
TokenStream::default()
34-
}
35-
36-
pub fn with_subtree(subtree: tt::Subtree) -> Self {
37-
if subtree.delimiter.is_some() {
38-
TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
39-
} else {
40-
TokenStream { token_trees: subtree.token_trees }
41-
}
42-
}
43-
44-
pub fn into_subtree(self) -> tt::Subtree {
45-
tt::Subtree { delimiter: None, token_trees: self.token_trees }
46-
}
47-
48-
pub fn is_empty(&self) -> bool {
49-
self.token_trees.is_empty()
50-
}
51-
}
52-
53-
/// Creates a token stream containing a single token tree.
54-
impl From<TokenTree> for TokenStream {
55-
fn from(tree: TokenTree) -> TokenStream {
56-
TokenStream { token_trees: vec![tree] }
57-
}
58-
}
59-
60-
/// Collects a number of token trees into a single stream.
61-
impl FromIterator<TokenTree> for TokenStream {
62-
fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
63-
trees.into_iter().map(TokenStream::from).collect()
64-
}
65-
}
66-
67-
/// A "flattening" operation on token streams, collects token trees
68-
/// from multiple token streams into a single stream.
69-
impl FromIterator<TokenStream> for TokenStream {
70-
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
71-
let mut builder = TokenStreamBuilder::new();
72-
streams.into_iter().for_each(|stream| builder.push(stream));
73-
builder.build()
74-
}
75-
}
76-
77-
impl Extend<TokenTree> for TokenStream {
78-
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
79-
self.extend(trees.into_iter().map(TokenStream::from));
80-
}
81-
}
82-
83-
impl Extend<TokenStream> for TokenStream {
84-
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
85-
for item in streams {
86-
for tkn in item {
87-
match tkn {
88-
tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
89-
self.token_trees.extend(subtree.token_trees);
90-
}
91-
_ => {
92-
self.token_trees.push(tkn);
93-
}
94-
}
95-
}
96-
}
97-
}
98-
}
99-
10029
#[derive(Clone)]
10130
pub struct SourceFile {
10231
// FIXME stub
@@ -158,130 +87,21 @@ impl IdentInterner {
15887
}
15988
}
16089

161-
pub struct TokenStreamBuilder {
162-
acc: TokenStream,
163-
}
164-
165-
/// Public implementation details for the `TokenStream` type, such as iterators.
166-
pub mod token_stream {
167-
use std::str::FromStr;
168-
169-
use super::{TokenStream, TokenTree};
170-
171-
/// An iterator over `TokenStream`'s `TokenTree`s.
172-
/// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
173-
/// and returns whole groups as token trees.
174-
impl IntoIterator for TokenStream {
175-
type Item = TokenTree;
176-
type IntoIter = super::IntoIter<TokenTree>;
177-
178-
fn into_iter(self) -> Self::IntoIter {
179-
self.token_trees.into_iter()
180-
}
181-
}
182-
183-
type LexError = String;
184-
185-
/// Attempts to break the string into tokens and parse those tokens into a token stream.
186-
/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
187-
/// or characters not existing in the language.
188-
/// All tokens in the parsed stream get `Span::call_site()` spans.
189-
///
190-
/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
191-
/// change these errors into `LexError`s later.
192-
impl FromStr for TokenStream {
193-
type Err = LexError;
194-
195-
fn from_str(src: &str) -> Result<TokenStream, LexError> {
196-
let (subtree, _token_map) =
197-
mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
198-
199-
let subtree = subtree_replace_token_ids_with_unspecified(subtree);
200-
Ok(TokenStream::with_subtree(subtree))
201-
}
202-
}
203-
204-
impl ToString for TokenStream {
205-
fn to_string(&self) -> String {
206-
tt::pretty(&self.token_trees)
207-
}
208-
}
209-
210-
fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
211-
tt::Subtree {
212-
delimiter: subtree
213-
.delimiter
214-
.map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
215-
token_trees: subtree
216-
.token_trees
217-
.into_iter()
218-
.map(token_tree_replace_token_ids_with_unspecified)
219-
.collect(),
220-
}
221-
}
222-
223-
fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
224-
match tt {
225-
tt::TokenTree::Leaf(leaf) => {
226-
tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
227-
}
228-
tt::TokenTree::Subtree(subtree) => {
229-
tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
230-
}
231-
}
232-
}
233-
234-
fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
235-
match leaf {
236-
tt::Leaf::Literal(lit) => {
237-
tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
238-
}
239-
tt::Leaf::Punct(punct) => {
240-
tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
241-
}
242-
tt::Leaf::Ident(ident) => {
243-
tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
244-
}
245-
}
246-
}
247-
}
248-
249-
impl TokenStreamBuilder {
250-
fn new() -> TokenStreamBuilder {
251-
TokenStreamBuilder { acc: TokenStream::new() }
252-
}
253-
254-
fn push(&mut self, stream: TokenStream) {
255-
self.acc.extend(stream.into_iter())
256-
}
257-
258-
fn build(self) -> TokenStream {
259-
self.acc
260-
}
261-
}
262-
26390
pub struct FreeFunctions;
26491

265-
#[derive(Clone)]
266-
pub struct TokenStreamIter {
267-
trees: IntoIter<TokenTree>,
268-
}
269-
27092
#[derive(Default)]
27193
pub struct RustAnalyzer {
272-
ident_interner: IdentInterner,
27394
// FIXME: store span information here.
27495
}
27596

27697
impl server::Types for RustAnalyzer {
27798
type FreeFunctions = FreeFunctions;
27899
type TokenStream = TokenStream;
279-
type Ident = IdentId;
280-
type Literal = Literal;
281100
type SourceFile = SourceFile;
101+
type MultiSpan = Vec<Span>;
282102
type Diagnostic = Diagnostic;
283103
type Span = Span;
284-
type MultiSpan = Vec<Span>;
104+
type Symbol = Symbol;
285105
}
286106

287107
impl server::FreeFunctions for RustAnalyzer {
@@ -693,46 +513,8 @@ impl server::Server for RustAnalyzer {
693513

694514
#[cfg(test)]
695515
mod tests {
696-
use super::super::proc_macro::bridge::server::Literal;
697516
use super::*;
698517

699-
#[test]
700-
fn test_ra_server_literals() {
701-
let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
702-
assert_eq!(srv.integer("1234").text, "1234");
703-
704-
assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
705-
assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
706-
assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
707-
assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
708-
assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
709-
assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
710-
assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
711-
assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
712-
assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
713-
assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
714-
assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
715-
assert_eq!(srv.float("0").text, "0.0");
716-
assert_eq!(srv.float("15684.5867").text, "15684.5867");
717-
assert_eq!(srv.f32("15684.58").text, "15684.58f32");
718-
assert_eq!(srv.f64("15684.58").text, "15684.58f64");
719-
720-
assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
721-
assert_eq!(srv.character('c').text, "'c'");
722-
assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
723-
724-
// u128::max
725-
assert_eq!(
726-
srv.integer("340282366920938463463374607431768211455").text,
727-
"340282366920938463463374607431768211455"
728-
);
729-
// i128::min
730-
assert_eq!(
731-
srv.integer("-170141183460469231731687303715884105728").text,
732-
"-170141183460469231731687303715884105728"
733-
);
734-
}
735-
736518
#[test]
737519
fn test_ra_server_to_string() {
738520
let s = TokenStream {

Comments (0)