Skip to content

Commit 67c76c3

Browse files
bors[bot] and matklad authored
Merge #6165
6165: Cleanup r=matklad a=matklad bors r+ 🤖 Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
2 parents faddea9 + fd8622e commit 67c76c3

File tree

2 files changed

+23
-22
lines changed

2 files changed

+23
-22
lines changed

crates/mbe/src/subtree_source.rs

Lines changed: 10 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
33
use parser::{Token, TokenSource};
44
use std::cell::{Cell, Ref, RefCell};
5-
use syntax::{tokenize, SmolStr, SyntaxKind, SyntaxKind::*, T};
5+
use syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T};
66
use tt::buffer::{Cursor, TokenBuffer};
77

88
#[derive(Debug, Clone, Eq, PartialEq)]
@@ -155,17 +155,15 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {
155155
}
156156

157157
fn convert_literal(l: &tt::Literal) -> TtToken {
158-
let mut kinds = tokenize(&l.text).0.into_iter().map(|token| token.kind);
159-
160-
let kind = match kinds.next() {
161-
Some(kind) if kind.is_literal() => Some(kind),
162-
Some(SyntaxKind::MINUS) => match kinds.next() {
163-
Some(kind) if kind.is_literal() => Some(kind),
164-
_ => None,
165-
},
166-
_ => None,
167-
}
168-
.unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));
158+
let is_negated = l.text.starts_with('-');
159+
let inner_text = &l.text[if is_negated { 1 } else { 0 }..];
160+
161+
let kind = lex_single_syntax_kind(inner_text)
162+
.map(|(kind, _error)| kind)
163+
.filter(|kind| {
164+
kind.is_literal() && (!is_negated || matches!(kind, FLOAT_NUMBER | INT_NUMBER))
165+
})
166+
.unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));
169167

170168
TtToken { kind, is_joint_to_next: false, text: l.text.clone() }
171169
}

crates/syntax/src/parsing/lexer.rs

Lines changed: 13 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
//! Lexer analyzes raw input string and produces lexemes (tokens).
22
//! It is just a bridge to `rustc_lexer`.
33
4-
use rustc_lexer::{LiteralKind as LK, RawStrError};
5-
64
use std::convert::TryInto;
75

6+
use rustc_lexer::{LiteralKind as LK, RawStrError};
7+
88
use crate::{
99
SyntaxError,
1010
SyntaxKind::{self, *},
@@ -61,27 +61,30 @@ pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
6161
(tokens, errors)
6262
}
6363

64-
/// Returns `SyntaxKind` and `Option<SyntaxError>` of the first token
65-
/// encountered at the beginning of the string.
64+
/// Returns `SyntaxKind` and `Option<SyntaxError>` if `text` parses as a single token.
6665
///
6766
/// Returns `None` if the string contains zero *or two or more* tokens.
6867
/// The token is malformed if the returned error is not `None`.
6968
///
7069
/// Beware that unescape errors are not checked at tokenization time.
7170
pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxError>)> {
72-
lex_first_token(text)
73-
.filter(|(token, _)| token.len == TextSize::of(text))
74-
.map(|(token, error)| (token.kind, error))
71+
let (first_token, err) = lex_first_token(text)?;
72+
if first_token.len != TextSize::of(text) {
73+
return None;
74+
}
75+
Some((first_token.kind, err))
7576
}
7677

7778
/// The same as `lex_single_syntax_kind()` but returns only `SyntaxKind` and
7879
/// returns `None` if any tokenization error occured.
7980
///
8081
/// Beware that unescape errors are not checked at tokenization time.
8182
pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
82-
lex_first_token(text)
83-
.filter(|(token, error)| !error.is_some() && token.len == TextSize::of(text))
84-
.map(|(token, _error)| token.kind)
83+
let (single_token, err) = lex_single_syntax_kind(text)?;
84+
if err.is_some() {
85+
return None;
86+
}
87+
Some(single_token)
8588
}
8689

8790
/// Returns `SyntaxKind` and `Option<SyntaxError>` of the first token

0 commit comments

Comments (0)