Skip to content

Commit 4d8f757 — "Impl Copy for Token and TokenKind." (parent: bb495d6)

File tree

20 files changed: +56 additions, −57 deletions

compiler/rustc_ast/src/token.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -333,7 +333,7 @@ impl From<IdentIsRaw> for bool {
333333
}
334334
}
335335

336-
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
336+
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
337337
pub enum TokenKind {
338338
/* Expression-operator symbols. */
339339
/// `=`
@@ -471,7 +471,7 @@ pub enum TokenKind {
471471
Eof,
472472
}
473473

474-
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
474+
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
475475
pub struct Token {
476476
pub kind: TokenKind,
477477
pub span: Span,

compiler/rustc_ast/src/tokenstream.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -482,7 +482,7 @@ impl TokenStream {
482482
Delimiter::Invisible(InvisibleOrigin::FlattenToken),
483483
TokenStream::token_alone(token::Lifetime(ident.name, is_raw), ident.span),
484484
),
485-
_ => TokenTree::Token(token.clone(), spacing),
485+
_ => TokenTree::Token(*token, spacing),
486486
}
487487
}
488488

compiler/rustc_expand/src/mbe/diagnostics.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,7 @@ impl<'dcx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'dcx, 'match
160160
.is_none_or(|failure| failure.is_better_position(*approx_position))
161161
{
162162
self.best_failure = Some(BestFailure {
163-
token: token.clone(),
163+
token: *token,
164164
position_in_tokenstream: *approx_position,
165165
msg,
166166
remaining_matcher: self

compiler/rustc_expand/src/mbe/macro_parser.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -179,7 +179,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
179179
for tt in tts {
180180
match tt {
181181
TokenTree::Token(token) => {
182-
locs.push(MatcherLoc::Token { token: token.clone() });
182+
locs.push(MatcherLoc::Token { token: *token });
183183
}
184184
TokenTree::Delimited(span, _, delimited) => {
185185
let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
@@ -648,7 +648,7 @@ impl TtParser {
648648
// There are no possible next positions AND we aren't waiting for the black-box
649649
// parser: syntax error.
650650
return Failure(T::build_failure(
651-
parser.token.clone(),
651+
parser.token,
652652
parser.approx_token_stream_pos(),
653653
"no rules expected this token in macro call",
654654
));

compiler/rustc_expand/src/mbe/macro_rules.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -778,7 +778,7 @@ impl<'tt> FirstSets<'tt> {
778778
// token could be the separator token itself.
779779

780780
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
781-
first.add_one_maybe(TtHandle::from_token(sep.clone()));
781+
first.add_one_maybe(TtHandle::from_token(*sep));
782782
}
783783

784784
// Reverse scan: Sequence comes before `first`.
@@ -841,7 +841,7 @@ impl<'tt> FirstSets<'tt> {
841841
// If the sequence contents can be empty, then the first
842842
// token could be the separator token itself.
843843
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
844-
first.add_one_maybe(TtHandle::from_token(sep.clone()));
844+
first.add_one_maybe(TtHandle::from_token(*sep));
845845
}
846846

847847
assert!(first.maybe_empty);
@@ -917,7 +917,7 @@ impl<'tt> Clone for TtHandle<'tt> {
917917
// This variant *must* contain a `mbe::TokenTree::Token`, and not
918918
// any other variant of `mbe::TokenTree`.
919919
TtHandle::Token(mbe::TokenTree::Token(tok)) => {
920-
TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
920+
TtHandle::Token(mbe::TokenTree::Token(*tok))
921921
}
922922

923923
_ => unreachable!(),
@@ -1093,7 +1093,7 @@ fn check_matcher_core<'tt>(
10931093
let mut new;
10941094
let my_suffix = if let Some(sep) = &seq_rep.separator {
10951095
new = suffix_first.clone();
1096-
new.add_one_maybe(TtHandle::from_token(sep.clone()));
1096+
new.add_one_maybe(TtHandle::from_token(*sep));
10971097
&new
10981098
} else {
10991099
&suffix_first

compiler/rustc_expand/src/mbe/quoted.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -283,7 +283,7 @@ fn parse_tree<'a>(
283283
}
284284

285285
// `tree` is an arbitrary token. Keep it.
286-
tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
286+
tokenstream::TokenTree::Token(token, _) => TokenTree::Token(*token),
287287

288288
// `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
289289
// descend into the delimited set and further parse it.
@@ -321,7 +321,7 @@ fn parse_kleene_op(
321321
match iter.next() {
322322
Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
323323
Some(op) => Ok(Ok((op, token.span))),
324-
None => Ok(Err(token.clone())),
324+
None => Ok(Err(*token)),
325325
},
326326
tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
327327
}

compiler/rustc_expand/src/mbe/transcribe.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ pub(super) fn transcribe<'a>(
164164
if repeat_idx < repeat_len {
165165
frame.idx = 0;
166166
if let Some(sep) = sep {
167-
result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
167+
result.push(TokenTree::Token(*sep, Spacing::Alone));
168168
}
169169
continue;
170170
}
@@ -438,7 +438,7 @@ pub(super) fn transcribe<'a>(
438438
// Nothing much to do here. Just push the token to the result, being careful to
439439
// preserve syntax context.
440440
mbe::TokenTree::Token(token) => {
441-
let mut token = token.clone();
441+
let mut token = *token;
442442
mut_visit::visit_token(&mut marker, &mut token);
443443
let tt = TokenTree::Token(token, Spacing::Alone);
444444
result.push(tt);

compiler/rustc_parse/src/lexer/unicode_chars.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -376,7 +376,7 @@ pub(super) fn check_for_substitution(
376376
ascii_name,
377377
})
378378
};
379-
(token.clone(), sugg)
379+
(*token, sugg)
380380
}
381381

382382
/// Extract string if found at current position with given delimiters

compiler/rustc_parse/src/parser/attr_wrapper.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
120120
// produce an empty `TokenStream` if no calls were made, and omit the
121121
// final token otherwise.
122122
let mut cursor_snapshot = self.cursor_snapshot.clone();
123-
let tokens = iter::once(FlatToken::Token(self.start_token.clone()))
123+
let tokens = iter::once(FlatToken::Token(self.start_token))
124124
.chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
125125
.take(self.num_calls as usize);
126126

@@ -186,7 +186,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
186186
impl<'a> Parser<'a> {
187187
pub(super) fn collect_pos(&self) -> CollectPos {
188188
CollectPos {
189-
start_token: (self.token.clone(), self.token_spacing),
189+
start_token: (self.token, self.token_spacing),
190190
cursor_snapshot: self.token_cursor.clone(),
191191
start_pos: self.num_bump_calls,
192192
}

compiler/rustc_parse/src/parser/diagnostics.rs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -322,7 +322,7 @@ impl<'a> Parser<'a> {
322322
let mut recovered_ident = None;
323323
// we take this here so that the correct original token is retained in
324324
// the diagnostic, regardless of eager recovery.
325-
let bad_token = self.token.clone();
325+
let bad_token = self.token;
326326

327327
// suggest prepending a keyword in identifier position with `r#`
328328
let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
@@ -382,7 +382,7 @@ impl<'a> Parser<'a> {
382382
// if the previous token is a valid keyword
383383
// that might use a generic, then suggest a correct
384384
// generic placement (later on)
385-
let maybe_keyword = self.prev_token.clone();
385+
let maybe_keyword = self.prev_token;
386386
if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
387387
// if we have a valid keyword, attempt to parse generics
388388
// also obtain the keywords symbol
@@ -530,7 +530,7 @@ impl<'a> Parser<'a> {
530530
// let y = 42;
531531
let guar = self.dcx().emit_err(ExpectedSemi {
532532
span: self.token.span,
533-
token: self.token.clone(),
533+
token: self.token,
534534
unexpected_token_label: None,
535535
sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
536536
});
@@ -555,7 +555,7 @@ impl<'a> Parser<'a> {
555555
let span = self.prev_token.span.shrink_to_hi();
556556
let guar = self.dcx().emit_err(ExpectedSemi {
557557
span,
558-
token: self.token.clone(),
558+
token: self.token,
559559
unexpected_token_label: Some(self.token.span),
560560
sugg: ExpectedSemiSugg::AddSemi(span),
561561
});
@@ -801,7 +801,7 @@ impl<'a> Parser<'a> {
801801
let span = self.prev_token.span.shrink_to_hi();
802802
let mut err = self.dcx().create_err(ExpectedSemi {
803803
span,
804-
token: self.token.clone(),
804+
token: self.token,
805805
unexpected_token_label: Some(self.token.span),
806806
sugg: ExpectedSemiSugg::AddSemi(span),
807807
});

0 commit comments