Commit 7aef5db

Impl Copy for Token and TokenKind.
1 parent 13fdc3d commit 7aef5db

19 files changed: +69 -70 lines changed
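
Every change below follows the same pattern: once `Token` and `TokenKind` derive `Copy`, call sites that previously had to call `.clone()` on a `&Token` can take a cheap bitwise copy instead, via a plain dereference (`*token`) or a by-value field read. A minimal standalone sketch of that pattern, using simplified stand-in types rather than rustc's real `Token`, `TokenKind`, or `Span` definitions:

    // Simplified stand-ins for illustration only; rustc's TokenKind has many
    // more variants and Token carries a real Span.
    #[allow(dead_code)] // only Semi is constructed in this sketch
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum TokenKind {
        Eq,
        Semi,
        Eof,
    }

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Token {
        kind: TokenKind,
        span: (u32, u32), // placeholder for rustc's Span
    }

    fn main() {
        let token = Token { kind: TokenKind::Semi, span: (10, 11) };
        let token_ref: &Token = &token;

        // Before the change: an owned Token had to come from a clone.
        let owned_old = token_ref.clone();

        // After deriving Copy: a dereference copies the value directly.
        let owned_new = *token_ref;

        assert_eq!(owned_old, owned_new);
    }

The diffs in this commit apply that substitution mechanically at each call site; the derives change, and the now-redundant clones go away.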

compiler/rustc_ast/src/token.rs

Lines changed: 2 additions & 2 deletions
@@ -278,7 +278,7 @@ impl From<IdentIsRaw> for bool {
     }
 }

-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     /// `=`
@@ -376,7 +376,7 @@ pub enum TokenKind {
     Eof,
 }

-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,

compiler/rustc_ast/src/tokenstream.rs

Lines changed: 1 addition & 1 deletion
@@ -485,7 +485,7 @@ impl TokenStream {
                 Delimiter::Invisible(InvisibleOrigin::FlattenToken),
                 TokenStream::token_alone(token::Lifetime(ident.name), ident.span),
             ),
-            _ => TokenTree::Token(token.clone(), spacing),
+            _ => TokenTree::Token(*token, spacing),
         }
     }

compiler/rustc_expand/src/mbe/diagnostics.rs

Lines changed: 1 addition & 1 deletion
@@ -153,7 +153,7 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx,
             .map_or(true, |failure| failure.is_better_position(*approx_position))
         {
             self.best_failure = Some(BestFailure {
-                token: token.clone(),
+                token: *token,
                 position_in_tokenstream: *approx_position,
                 msg,
                 remaining_matcher: self

compiler/rustc_expand/src/mbe/macro_parser.rs

Lines changed: 2 additions & 2 deletions
@@ -181,7 +181,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
         for tt in tts {
             match tt {
                 TokenTree::Token(token) => {
-                    locs.push(MatcherLoc::Token { token: token.clone() });
+                    locs.push(MatcherLoc::Token { token: *token });
                 }
                 TokenTree::Delimited(span, _, delimited) => {
                     let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
@@ -645,7 +645,7 @@ impl TtParser {
                 // There are no possible next positions AND we aren't waiting for the black-box
                 // parser: syntax error.
                 return Failure(T::build_failure(
-                    parser.token.clone(),
+                    parser.token,
                     parser.approx_token_stream_pos(),
                     "no rules expected this token in macro call",
                 ));

compiler/rustc_expand/src/mbe/macro_rules.rs

Lines changed: 4 additions & 4 deletions
@@ -787,7 +787,7 @@ impl<'tt> FirstSets<'tt> {
                         // token could be the separator token itself.

                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(*sep));
                         }

                         // Reverse scan: Sequence comes before `first`.
@@ -850,7 +850,7 @@ impl<'tt> FirstSets<'tt> {
                         // If the sequence contents can be empty, then the first
                         // token could be the separator token itself.
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(*sep));
                         }

                         assert!(first.maybe_empty);
@@ -926,7 +926,7 @@ impl<'tt> Clone for TtHandle<'tt> {
             // This variant *must* contain a `mbe::TokenTree::Token`, and not
             // any other variant of `mbe::TokenTree`.
             TtHandle::Token(mbe::TokenTree::Token(tok)) => {
-                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+                TtHandle::Token(mbe::TokenTree::Token(*tok))
             }

             _ => unreachable!(),
@@ -1102,7 +1102,7 @@ fn check_matcher_core<'tt>(
                     let mut new;
                     let my_suffix = if let Some(sep) = &seq_rep.separator {
                         new = suffix_first.clone();
-                        new.add_one_maybe(TtHandle::from_token(sep.clone()));
+                        new.add_one_maybe(TtHandle::from_token(*sep));
                         &new
                     } else {
                         &suffix_first

compiler/rustc_expand/src/mbe/quoted.rs

Lines changed: 2 additions & 2 deletions
@@ -258,7 +258,7 @@ fn parse_tree<'a>(
         }

         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
+        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(*token),

         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
@@ -296,7 +296,7 @@ fn parse_kleene_op<'a>(
     match input.next() {
         Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
             Some(op) => Ok(Ok((op, token.span))),
-            None => Ok(Err(token.clone())),
+            None => Ok(Err(*token)),
         },
         tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
     }

compiler/rustc_expand/src/mbe/transcribe.rs

Lines changed: 2 additions & 2 deletions
@@ -155,7 +155,7 @@ pub(super) fn transcribe<'a>(
                 if repeat_idx < repeat_len {
                     frame.idx = 0;
                     if let Some(sep) = sep {
-                        result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
+                        result.push(TokenTree::Token(*sep, Spacing::Alone));
                     }
                     continue;
                 }
@@ -364,7 +364,7 @@ pub(super) fn transcribe<'a>(
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
             mbe::TokenTree::Token(token) => {
-                let mut token = token.clone();
+                let mut token = *token;
                 mut_visit::visit_token(&mut token, &mut marker);
                 let tt = TokenTree::Token(token, Spacing::Alone);
                 result.push(tt);

compiler/rustc_parse/src/lexer/unicode_chars.rs

Lines changed: 1 addition & 1 deletion
@@ -376,7 +376,7 @@ pub(super) fn check_for_substitution(
             ascii_name,
         })
     };
-    (token.clone(), sugg)
+    (*token, sugg)
 }

 /// Extract string if found at current position with given delimiters

compiler/rustc_parse/src/parser/attr_wrapper.rs

Lines changed: 7 additions & 8 deletions
@@ -104,13 +104,12 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens =
-            std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
-                .chain(std::iter::repeat_with(|| {
-                    let token = cursor_snapshot.next();
-                    (FlatToken::Token(token.0), token.1)
-                }))
-                .take(self.num_calls);
+        let tokens = std::iter::once((FlatToken::Token(self.start_token.0), self.start_token.1))
+            .chain(std::iter::repeat_with(|| {
+                let token = cursor_snapshot.next();
+                (FlatToken::Token(token.0), token.1)
+            }))
+            .take(self.num_calls);

         if !self.replace_ranges.is_empty() {
             let mut tokens: Vec<_> = tokens.collect();
@@ -211,7 +210,7 @@ impl<'a> Parser<'a> {
             return Ok(f(self, attrs.attrs)?.0);
         }

-        let start_token = (self.token.clone(), self.token_spacing);
+        let start_token = (self.token, self.token_spacing);
         let cursor_snapshot = self.token_cursor.clone();
         let start_pos = self.num_bump_calls;

compiler/rustc_parse/src/parser/diagnostics.rs

Lines changed: 8 additions & 8 deletions
@@ -287,7 +287,7 @@ impl<'a> Parser<'a> {
         let mut recovered_ident = None;
         // we take this here so that the correct original token is retained in
         // the diagnostic, regardless of eager recovery.
-        let bad_token = self.token.clone();
+        let bad_token = self.token;

         // suggest prepending a keyword in identifier position with `r#`
         let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
@@ -347,7 +347,7 @@ impl<'a> Parser<'a> {
         // if the previous token is a valid keyword
         // that might use a generic, then suggest a correct
         // generic placement (later on)
-        let maybe_keyword = self.prev_token.clone();
+        let maybe_keyword = self.prev_token;
         if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
             // if we have a valid keyword, attempt to parse generics
             // also obtain the keywords symbol
@@ -463,7 +463,7 @@ impl<'a> Parser<'a> {
                 false
             }

-            if **token != parser::TokenType::Token(self.token.kind.clone()) {
+            if **token != parser::TokenType::Token(self.token.kind) {
                 let eq = is_ident_eq_keyword(&self.token.kind, &token);
                 // If the suggestion is a keyword and the found token is an ident,
                 // the content of which are equal to the suggestion's content,
@@ -527,7 +527,7 @@ impl<'a> Parser<'a> {
            // let y = 42;
            let guar = self.dcx().emit_err(ExpectedSemi {
                span: self.token.span,
-                token: self.token.clone(),
+                token: self.token,
                unexpected_token_label: None,
                sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
            });
@@ -552,7 +552,7 @@ impl<'a> Parser<'a> {
            let span = self.prev_token.span.shrink_to_hi();
            let guar = self.dcx().emit_err(ExpectedSemi {
                span,
-                token: self.token.clone(),
+                token: self.token,
                unexpected_token_label: Some(self.token.span),
                sugg: ExpectedSemiSugg::AddSemi(span),
            });
@@ -748,7 +748,7 @@ impl<'a> Parser<'a> {
         let span = self.prev_token.span.shrink_to_hi();
         let mut err = self.dcx().create_err(ExpectedSemi {
             span,
-            token: self.token.clone(),
+            token: self.token,
             unexpected_token_label: Some(self.token.span),
             sugg: ExpectedSemiSugg::AddSemi(span),
         });
@@ -2371,7 +2371,7 @@ impl<'a> Parser<'a> {
         // code was interpreted. This helps the user realize when a macro argument of one type is
         // later reinterpreted as a different type, like `$x:expr` being reinterpreted as `$x:pat`
         // in a subsequent macro invocation (#71039).
-        let mut tok = self.token.clone();
+        let mut tok = self.token;
         let mut labels = vec![];
         while let TokenKind::Interpolated(nt) = &tok.kind {
             let tokens = nt.tokens();
@@ -2381,7 +2381,7 @@ impl<'a> Parser<'a> {
                 && let tokens = tokens.0.deref()
                 && let [AttrTokenTree::Token(token, _)] = &tokens[..]
             {
-                tok = token.clone();
+                tok = *token;
             } else {
                 break;
             }
