Skip to content

Commit 8fc2809

Browse files
committed
Impl Copy for Token and TokenKind.
1 parent 2c24f50 commit 8fc2809

File tree

21 files changed

+52
-57
lines changed

21 files changed

+52
-57
lines changed

compiler/rustc_ast/src/token.rs

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -259,7 +259,7 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
259259
.contains(&name)
260260
}
261261

262-
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
262+
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
263263
pub enum TokenKind {
264264
/* Expression-operator symbols. */
265265
Eq,
@@ -329,7 +329,7 @@ pub enum TokenKind {
329329
Eof,
330330
}
331331

332-
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
332+
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
333333
pub struct Token {
334334
pub kind: TokenKind,
335335
pub span: Span,

compiler/rustc_ast/src/tokenstream.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -461,7 +461,7 @@ impl TokenStream {
461461
Delimiter::Invisible(InvisibleSource::FlattenToken),
462462
TokenStream::token_alone(token::Lifetime(name), uninterpolated_span),
463463
),
464-
_ => TokenTree::Token(token.clone(), spacing),
464+
_ => TokenTree::Token(*token, spacing),
465465
}
466466
}
467467

compiler/rustc_expand/src/mbe/diagnostics.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -147,7 +147,7 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx,
147147
.map_or(true, |failure| failure.is_better_position(*approx_position))
148148
{
149149
self.best_failure = Some(BestFailure {
150-
token: token.clone(),
150+
token: *token,
151151
position_in_tokenstream: *approx_position,
152152
msg,
153153
remaining_matcher: self

compiler/rustc_expand/src/mbe/macro_parser.rs

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -181,7 +181,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
181181
for tt in tts {
182182
match tt {
183183
TokenTree::Token(token) => {
184-
locs.push(MatcherLoc::Token { token: token.clone() });
184+
locs.push(MatcherLoc::Token { token: *token });
185185
}
186186
TokenTree::Delimited(span, delimited) => {
187187
let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
@@ -645,7 +645,7 @@ impl TtParser {
645645
// There are no possible next positions AND we aren't waiting for the black-box
646646
// parser: syntax error.
647647
return Failure(T::build_failure(
648-
parser.token.clone(),
648+
parser.token,
649649
parser.approx_token_stream_pos(),
650650
"no rules expected this token in macro call",
651651
));

compiler/rustc_expand/src/mbe/macro_rules.rs

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -807,7 +807,7 @@ impl<'tt> FirstSets<'tt> {
807807
// token could be the separator token itself.
808808

809809
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
810-
first.add_one_maybe(TtHandle::from_token(sep.clone()));
810+
first.add_one_maybe(TtHandle::from_token(*sep));
811811
}
812812

813813
// Reverse scan: Sequence comes before `first`.
@@ -870,7 +870,7 @@ impl<'tt> FirstSets<'tt> {
870870
// If the sequence contents can be empty, then the first
871871
// token could be the separator token itself.
872872
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
873-
first.add_one_maybe(TtHandle::from_token(sep.clone()));
873+
first.add_one_maybe(TtHandle::from_token(*sep));
874874
}
875875

876876
assert!(first.maybe_empty);
@@ -946,7 +946,7 @@ impl<'tt> Clone for TtHandle<'tt> {
946946
// This variant *must* contain a `mbe::TokenTree::Token`, and not
947947
// any other variant of `mbe::TokenTree`.
948948
TtHandle::Token(mbe::TokenTree::Token(tok)) => {
949-
TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
949+
TtHandle::Token(mbe::TokenTree::Token(*tok))
950950
}
951951

952952
_ => unreachable!(),
@@ -1120,7 +1120,7 @@ fn check_matcher_core<'tt>(
11201120
let mut new;
11211121
let my_suffix = if let Some(sep) = &seq_rep.separator {
11221122
new = suffix_first.clone();
1123-
new.add_one_maybe(TtHandle::from_token(sep.clone()));
1123+
new.add_one_maybe(TtHandle::from_token(*sep));
11241124
&new
11251125
} else {
11261126
&suffix_first

compiler/rustc_expand/src/mbe/quoted.rs

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -257,7 +257,7 @@ fn parse_tree<'a>(
257257
}
258258

259259
// `tree` is an arbitrary token. Keep it.
260-
tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
260+
tokenstream::TokenTree::Token(token, _) => TokenTree::Token(*token),
261261

262262
// `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
263263
// descend into the delimited set and further parse it.
@@ -294,7 +294,7 @@ fn parse_kleene_op<'a>(
294294
match input.next() {
295295
Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(&token) {
296296
Some(op) => Ok(Ok((op, token.span))),
297-
None => Ok(Err(token.clone())),
297+
None => Ok(Err(*token)),
298298
},
299299
tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
300300
}

compiler/rustc_expand/src/mbe/transcribe.rs

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -128,7 +128,7 @@ pub(super) fn transcribe<'a>(
128128
if repeat_idx < repeat_len {
129129
*idx = 0;
130130
if let Some(sep) = sep {
131-
result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
131+
result.push(TokenTree::Token(*sep, Spacing::Alone));
132132
}
133133
continue;
134134
}
@@ -330,7 +330,7 @@ pub(super) fn transcribe<'a>(
330330
// Nothing much to do here. Just push the token to the result, being careful to
331331
// preserve syntax context.
332332
mbe::TokenTree::Token(token) => {
333-
let mut token = token.clone();
333+
let mut token = *token;
334334
mut_visit::visit_token(&mut token, &mut marker);
335335
let tt = TokenTree::Token(token, Spacing::Alone);
336336
result.push(tt);

compiler/rustc_parse/src/lexer/unicode_chars.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -377,7 +377,7 @@ pub(super) fn check_for_substitution(
377377
ascii_name,
378378
})
379379
};
380-
(token.clone(), sugg)
380+
(*token, sugg)
381381
}
382382

383383
/// Extract string if found at current position with given delimiters

compiler/rustc_parse/src/parser/attr.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -411,7 +411,7 @@ impl<'a> Parser<'a> {
411411
Err(err) => err.cancel(),
412412
}
413413

414-
Err(InvalidMetaItem { span: self.token.span, token: self.token.clone() }
414+
Err(InvalidMetaItem { span: self.token.span, token: self.token }
415415
.into_diagnostic(&self.sess.span_diagnostic))
416416
}
417417
}

compiler/rustc_parse/src/parser/attr_wrapper.rs

Lines changed: 7 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -104,13 +104,12 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
104104
// produce an empty `TokenStream` if no calls were made, and omit the
105105
// final token otherwise.
106106
let mut cursor_snapshot = self.cursor_snapshot.clone();
107-
let tokens =
108-
std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
109-
.chain((0..self.num_calls).map(|_| {
110-
let token = cursor_snapshot.next();
111-
(FlatToken::Token(token.0), token.1)
112-
}))
113-
.take(self.num_calls);
107+
let tokens = std::iter::once((FlatToken::Token(self.start_token.0), self.start_token.1))
108+
.chain((0..self.num_calls).map(|_| {
109+
let token = cursor_snapshot.next();
110+
(FlatToken::Token(token.0), token.1)
111+
}))
112+
.take(self.num_calls);
114113

115114
if !self.replace_ranges.is_empty() {
116115
let mut tokens: Vec<_> = tokens.collect();
@@ -211,7 +210,7 @@ impl<'a> Parser<'a> {
211210
return Ok(f(self, attrs.attrs)?.0);
212211
}
213212

214-
let start_token = (self.token.clone(), self.token_spacing);
213+
let start_token = (self.token, self.token_spacing);
215214
let cursor_snapshot = self.token_cursor.clone();
216215
let start_pos = self.num_bump_calls;
217216

0 commit comments

Comments (0)