@@ -267,6 +267,108 @@ struct RawConvertor<'a> {
     inner: std::slice::Iter<'a, Token>,
 }
 
+trait SrcToken {
+    fn kind() -> SyntaxKind;
+}
+
+trait TokenConvertor {
+    type Token: SrcToken;
+
+    fn go(&mut self) -> Option<tt::Subtree> {
+        let mut subtree = tt::Subtree::default();
+        subtree.delimiter = None;
+        while self.peek().is_some() {
+            self.collect_leaf(&mut subtree.token_trees);
+        }
+        if subtree.token_trees.is_empty() {
+            return None;
+        }
+        if subtree.token_trees.len() == 1 {
+            if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
+                return Some(first.clone());
+            }
+        }
+        Some(subtree)
+    }
+
+    fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
+
+    fn peek(&self) -> Option<Self::Token>;
+
+    fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
+        let (token, range) = match self.bump() {
+            None => return,
+            Some(it) => it,
+        };
+
+        let k: SyntaxKind = token.kind();
+        if k == COMMENT {
+            let node = doc_comment(&self.text[range]);
+            if let Some(tokens) = convert_doc_comment(&node) {
+                result.extend(tokens);
+            }
+            return;
+        }
+
+        result.push(if k.is_punct() {
+            let delim = match k {
+                T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])),
+                T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])),
+                T!['['] => Some((tt::DelimiterKind::Bracket, T![']'])),
+                _ => None,
+            };
+
+            if let Some((kind, closed)) = delim {
+                let mut subtree = tt::Subtree::default();
+                let id = self.id_alloc.open_delim(range);
+                subtree.delimiter = Some(tt::Delimiter { kind, id });
+
+                while self.peek().map(|it| it.kind != closed).unwrap_or(false) {
+                    self.collect_leaf(&mut subtree.token_trees);
+                }
+                let last_range = match self.bump() {
+                    None => return,
+                    Some(it) => it.1,
+                };
+                self.id_alloc.close_delim(id, last_range);
+                subtree.into()
+            } else {
+                let spacing = match self.peek() {
+                    Some(next)
+                        if next.kind.is_trivia()
+                            || next.kind == T!['[']
+                            || next.kind == T!['{']
+                            || next.kind == T!['('] =>
+                    {
+                        tt::Spacing::Alone
+                    }
+                    Some(next) if next.kind.is_punct() => tt::Spacing::Joint,
+                    _ => tt::Spacing::Alone,
+                };
+                let char =
+                    self.text[range].chars().next().expect("Token from lexer must be single char");
+
+                tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc.alloc(range) }).into()
+            }
+        } else {
+            macro_rules! make_leaf {
+                ($i:ident) => {
+                    tt::$i { id: self.id_alloc.alloc(range), text: self.text[range].into() }.into()
+                };
+            }
+            let leaf: tt::Leaf = match k {
+                T![true] | T![false] => make_leaf!(Literal),
+                IDENT | LIFETIME => make_leaf!(Ident),
+                k if k.is_keyword() => make_leaf!(Ident),
+                k if k.is_literal() => make_leaf!(Literal),
+                _ => return,
+            };
+
+            leaf.into()
+        });
+    }
+}
+
 impl RawConvertor<'_> {
     fn go(&mut self) -> Option<tt::Subtree> {
         let mut subtree = tt::Subtree::default();
@@ -295,6 +397,7 @@ impl RawConvertor<'_> {
     fn peek(&self) -> Option<Token> {
         self.inner.as_slice().get(0).cloned()
     }
+
 
     fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
         let (token, range) = match self.bump() {
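// ---------------------------------------------------------------------------
// Editor's sketch (not part of the commit above): a minimal, self-contained
// toy showing the shape of the abstraction the diff introduces -- a token
// source hidden behind a small trait (`SrcToken`) plus a converter trait with
// a default driver method (`go`). `Kind`, `ToyToken`, and `VecConvertor` are
// hypothetical stand-ins; the real `TokenConvertor` above additionally deals
// with text ranges, token-id allocation, delimiter nesting, and doc comments.
// ---------------------------------------------------------------------------

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Kind {
    Ident,
    Punct,
}

trait SrcToken {
    fn kind(&self) -> Kind;
}

trait TokenConvertor {
    type Token: SrcToken;

    /// Consume the next token, or return None at end of input.
    fn bump(&mut self) -> Option<Self::Token>;

    /// Look at the next token without consuming it.
    fn peek(&self) -> Option<Self::Token>;

    /// Generic driver supplied by the trait: walk the whole stream and
    /// collect the kind of every token.
    fn go(&mut self) -> Vec<Kind> {
        let mut out = Vec::new();
        while self.peek().is_some() {
            if let Some(token) = self.bump() {
                out.push(token.kind());
            }
        }
        out
    }
}

/// A flat, slice-backed source, analogous in spirit to `RawConvertor`.
struct VecConvertor {
    tokens: Vec<ToyToken>,
    pos: usize,
}

#[derive(Clone, Copy)]
struct ToyToken {
    kind: Kind,
}

impl SrcToken for ToyToken {
    fn kind(&self) -> Kind {
        self.kind
    }
}

impl TokenConvertor for VecConvertor {
    type Token = ToyToken;

    fn bump(&mut self) -> Option<ToyToken> {
        let token = self.tokens.get(self.pos).copied();
        if token.is_some() {
            self.pos += 1;
        }
        token
    }

    fn peek(&self) -> Option<ToyToken> {
        self.tokens.get(self.pos).copied()
    }
}

fn main() {
    let mut conv = VecConvertor {
        tokens: vec![ToyToken { kind: Kind::Ident }, ToyToken { kind: Kind::Punct }],
        pos: 0,
    };
    // The default `go` driver walks the stream via the implementor's bump/peek.
    assert_eq!(conv.go(), vec![Kind::Ident, Kind::Punct]);
}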