
Commit c8ade46

Rollup merge of rust-lang#58476 - nnethercote:rm-LazyTokenStream, r=petrochenkov
Remove `LazyTokenStream`.

`LazyTokenStream` was added in rust-lang#40939. Perhaps it was an effective optimization then, but no longer. This PR removes it, making the code both simpler and faster.

r? @alexcrichton
2 parents: 4fc275d + 895a794
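
The gist of the change: `Token::Interpolated` previously carried an `Lrc` of a `(Nonterminal, LazyTokenStream)` pair, and with the lazy stream gone it now carries just `Lrc<Nonterminal>`, so every consumer that matched on `nt.0` now dereferences the `Lrc` instead. Below is a minimal, self-contained sketch of that shape using toy types, not rustc's real definitions; the pre-PR payload type is inferred from the `nt.0`/`nt.1` accesses removed in the diff.

    #![allow(dead_code)]

    // Toy model of the change; rustc's real Token and Nonterminal are much larger.
    use std::rc::Rc as Lrc; // stand-in for rustc_data_structures::sync::Lrc

    enum Nonterminal {
        NtExpr(String),
        NtItem(String),
    }

    enum Token {
        // Was (pre-PR, inferred from the removed `nt.0` / `nt.1` accesses):
        //     Interpolated(Lrc<(Nonterminal, LazyTokenStream)>)
        Interpolated(Lrc<Nonterminal>),
        Ident(String),
    }

    fn describe(token: &Token) -> &'static str {
        match *token {
            // Call sites that used to write `match nt.0 { ... }` now dereference
            // the Lrc directly: `*nt` when `nt` is bound by value, `**nt` when it
            // is bound by reference, as here.
            Token::Interpolated(ref nt) => match **nt {
                Nonterminal::NtExpr(_) => "interpolated expression",
                Nonterminal::NtItem(_) => "interpolated item",
            },
            Token::Ident(_) => "identifier",
        }
    }

    fn main() {
        let tok = Token::Interpolated(Lrc::new(Nonterminal::NtExpr("1 + 1".into())));
        println!("{}", describe(&tok));
    }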


16 files changed (+168 -226 lines)


src/librustc/hir/lowering.rs (+6 -6)

@@ -1124,19 +1124,19 @@ impl<'a> LoweringContext<'a> {
             TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
                 span,
                 delim,
-                self.lower_token_stream(tts.into()).into(),
+                self.lower_token_stream(tts),
             ).into(),
         }
     }

     fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
         match token {
-            Token::Interpolated(_) => {}
-            other => return TokenTree::Token(span, other).into(),
+            Token::Interpolated(nt) => {
+                let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
+                self.lower_token_stream(tts)
+            }
+            other => TokenTree::Token(span, other).into(),
         }
-
-        let tts = token.interpolated_to_tokenstream(&self.sess.parse_sess, span);
-        self.lower_token_stream(tts)
     }

     fn lower_arm(&mut self, arm: &Arm) -> hir::Arm {

src/librustc/hir/map/def_collector.rs (+1 -1)

@@ -339,7 +339,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {

     fn visit_token(&mut self, t: Token) {
         if let Token::Interpolated(nt) = t {
-            if let token::NtExpr(ref expr) = nt.0 {
+            if let token::NtExpr(ref expr) = *nt {
                 if let ExprKind::Mac(..) = expr.node {
                     self.visit_macro_invoc(expr.id);
                 }

src/librustc_resolve/build_reduced_graph.rs (+1 -1)

@@ -1025,7 +1025,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {

     fn visit_token(&mut self, t: Token) {
         if let Token::Interpolated(nt) = t {
-            if let token::NtExpr(ref expr) = nt.0 {
+            if let token::NtExpr(ref expr) = *nt {
                 if let ast::ExprKind::Mac(..) = expr.node {
                     self.visit_invoc(expr.id);
                 }

src/libsyntax/attr/mod.rs (+2 -2)

@@ -517,7 +517,7 @@ impl MetaItem {
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
+            Some(TokenTree::Token(_, Token::Interpolated(nt))) => match *nt {
                 token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
                 token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
                 token::Nonterminal::NtPath(ref path) => path.clone(),

@@ -682,7 +682,7 @@ impl LitKind
         match token {
             Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)),
             Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)),
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(nt) => match *nt {
                 token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
                     ExprKind::Lit(ref lit) => Some(lit.node.clone()),
                     _ => None,

src/libsyntax/ext/base.rs (+1 -1)

@@ -266,7 +266,7 @@ impl<F> TTMacroExpander for F
 impl MutVisitor for AvoidInterpolatedIdents {
     fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
         if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
-            if let token::NtIdent(ident, is_raw) = nt.0 {
+            if let token::NtIdent(ident, is_raw) = **nt {
                 *tt = tokenstream::TokenTree::Token(ident.span,
                                                     token::Ident(ident, is_raw));
             }

src/libsyntax/ext/expand.rs (+3 -2)

@@ -25,6 +25,7 @@ use syntax_pos::{Span, DUMMY_SP, FileName};
 use syntax_pos::hygiene::ExpnFormat;

 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lrc;
 use std::fs;
 use std::io::ErrorKind;
 use std::{iter, mem};

@@ -584,14 +585,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
             AttrProcMacro(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::Token(DUMMY_SP, Token::interpolated(match item {
+                let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
                     Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
                     Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
                     Annotatable::Expr(expr) => token::NtExpr(expr),
-                })).into();
+                }))).into();
                 let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
                 let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
                 let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,
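
On the construction side, as in the `AttrProcMacro` arm above, the removed `Token::interpolated` constructor means callers now allocate the `Lrc` themselves. In terms of the toy types sketched after the commit message (a hypothetical call site, not taken from this diff):

    let nt = Nonterminal::NtExpr("2 * 3".into());
    let tok = Token::Interpolated(Lrc::new(nt)); // was: Token::interpolated(nt)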

src/libsyntax/ext/tt/macro_parser.rs (+15 -14)

@@ -88,6 +88,7 @@ use smallvec::{smallvec, SmallVec};
 use syntax_pos::Span;

 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lrc;
 use std::collections::hash_map::Entry::{Occupied, Vacant};
 use std::mem;
 use std::ops::{Deref, DerefMut};

@@ -179,7 +180,7 @@ struct MatcherPos<'root, 'tt: 'root> {
     /// all bound matches from the submatcher into the shared top-level `matches` vector. If `sep`
     /// and `up` are `Some`, then `matches` is _not_ the shared top-level list. Instead, if one
     /// wants the shared `matches`, one should use `up.matches`.
-    matches: Box<[Rc<NamedMatchVec>]>,
+    matches: Box<[Lrc<NamedMatchVec>]>,
     /// The position in `matches` corresponding to the first metavar in this matcher's sequence of
     /// token trees. In other words, the first metavar in the first token of `top_elts` corresponds
     /// to `matches[match_lo]`.

@@ -218,7 +219,7 @@ struct MatcherPos<'root, 'tt: 'root> {
 impl<'root, 'tt> MatcherPos<'root, 'tt> {
     /// Adds `m` as a named match for the `idx`-th metavar.
     fn push_match(&mut self, idx: usize, m: NamedMatch) {
-        let matches = Rc::make_mut(&mut self.matches[idx]);
+        let matches = Lrc::make_mut(&mut self.matches[idx]);
         matches.push(m);
     }
 }

@@ -295,11 +296,11 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
 }

 /// `len` `Vec`s (initially shared and empty) that will store matches of metavars.
-fn create_matches(len: usize) -> Box<[Rc<NamedMatchVec>]> {
+fn create_matches(len: usize) -> Box<[Lrc<NamedMatchVec>]> {
     if len == 0 {
         vec![]
     } else {
-        let empty_matches = Rc::new(SmallVec::new());
+        let empty_matches = Lrc::new(SmallVec::new());
         vec![empty_matches; len]
     }.into_boxed_slice()
 }

@@ -353,8 +354,8 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP
 /// token tree it was derived from.
 #[derive(Debug, Clone)]
 pub enum NamedMatch {
-    MatchedSeq(Rc<NamedMatchVec>, DelimSpan),
-    MatchedNonterminal(Rc<Nonterminal>),
+    MatchedSeq(Lrc<NamedMatchVec>, DelimSpan),
+    MatchedNonterminal(Lrc<Nonterminal>),
 }

 /// Takes a sequence of token trees `ms` representing a matcher which successfully matched input

@@ -561,7 +562,7 @@ fn inner_parse_loop<'root, 'tt>(
                 new_item.match_cur += seq.num_captures;
                 new_item.idx += 1;
                 for idx in item.match_cur..item.match_cur + seq.num_captures {
-                    new_item.push_match(idx, MatchedSeq(Rc::new(smallvec![]), sp));
+                    new_item.push_match(idx, MatchedSeq(Lrc::new(smallvec![]), sp));
                 }
                 cur_items.push(new_item);
             }

@@ -707,7 +708,7 @@ pub fn parse(
             let matches = eof_items[0]
                 .matches
                 .iter_mut()
-                .map(|dv| Rc::make_mut(dv).pop().unwrap());
+                .map(|dv| Lrc::make_mut(dv).pop().unwrap());
             return nameize(sess, ms, matches);
         } else if eof_items.len() > 1 {
             return Error(

@@ -780,7 +781,7 @@ pub fn parse(
                 let match_cur = item.match_cur;
                 item.push_match(
                     match_cur,
-                    MatchedNonterminal(Rc::new(parse_nt(&mut parser, span, &ident.as_str()))),
+                    MatchedNonterminal(Lrc::new(parse_nt(&mut parser, span, &ident.as_str()))),
                 );
                 item.idx += 1;
                 item.match_cur += 1;

@@ -829,7 +830,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
         },
         "block" => match *token {
             Token::OpenDelim(token::Brace) => true,
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(ref nt) => match **nt {
                 token::NtItem(_)
                 | token::NtPat(_)
                 | token::NtTy(_)

@@ -843,9 +844,9 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
         },
         "path" | "meta" => match *token {
             Token::ModSep | Token::Ident(..) => true,
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(ref nt) => match **nt {
                 token::NtPath(_) | token::NtMeta(_) => true,
-                _ => may_be_ident(&nt.0),
+                _ => may_be_ident(&nt),
             },
             _ => false,
         },

@@ -862,12 +863,12 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
             Token::ModSep | // path
             Token::Lt | // path (UFCS constant)
             Token::BinOp(token::Shl) => true, // path (double UFCS)
-            Token::Interpolated(ref nt) => may_be_ident(&nt.0),
+            Token::Interpolated(ref nt) => may_be_ident(nt),
             _ => false,
         },
         "lifetime" => match *token {
             Token::Lifetime(_) => true,
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(ref nt) => match **nt {
                 token::NtLifetime(_) | token::NtTT(_) => true,
                 _ => false,
             },
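
The `Rc` → `Lrc` switch here keeps `MatchedNonterminal` holding the same pointer type as the new `Token::Interpolated` payload, so a matched nonterminal can become a token by cloning the pointer rather than the nonterminal. `Lrc` is the alias from `rustc_data_structures::sync` that resolves to `Rc` or `Arc` depending on whether the compiler is built for parallel queries; a rough sketch of the idea follows (the cfg name is approximate and not taken from this diff):

    // Approximate shape of the alias; the real definition lives in
    // rustc_data_structures::sync and the cfg flag name may differ.
    #[cfg(not(parallel_compiler))]
    pub use std::rc::Rc as Lrc;
    #[cfg(parallel_compiler)]
    pub use std::sync::Arc as Lrc;

    // Clone-on-write call sites such as MatcherPos::push_match keep the same
    // shape either way:
    fn push_shared(values: &mut Lrc<Vec<u32>>, value: u32) {
        Lrc::make_mut(values).push(value); // same API as Rc::make_mut / Arc::make_mut
    }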

src/libsyntax/ext/tt/transcribe.rs (+1 -1)

@@ -149,7 +149,7 @@ pub fn transcribe(cx: &ExtCtxt<'_>,
                     result.push(tt.clone().into());
                 } else {
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    let token = TokenTree::Token(sp, Token::interpolated((**nt).clone()));
+                    let token = TokenTree::Token(sp, Token::Interpolated(nt.clone()));
                     result.push(token.into());
                 }
             } else {

src/libsyntax/mut_visit.rs (+2 -3)

@@ -581,9 +581,8 @@ pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
         token::Ident(id, _is_raw) => vis.visit_ident(id),
         token::Lifetime(id) => vis.visit_ident(id),
         token::Interpolated(nt) => {
-            let nt = Lrc::make_mut(nt);
-            vis.visit_interpolated(&mut nt.0);
-            nt.1 = token::LazyTokenStream::new();
+            let mut nt = Lrc::make_mut(nt);
+            vis.visit_interpolated(&mut nt);
         }
         _ => {}
     }

src/libsyntax/parse/attr.rs (+2 -2)

@@ -141,7 +141,7 @@ impl<'a> Parser<'a> {
     /// The delimiters or `=` are still put into the resulting token stream.
     crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
         let meta = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
                 _ => None,
             },

@@ -227,7 +227,7 @@ impl<'a> Parser<'a> {
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         let nt_meta = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref e) => Some(e.clone()),
                 _ => None,
             },

src/libsyntax/parse/parser.rs (+8 -8)

@@ -119,7 +119,7 @@ enum BlockMode {
 macro_rules! maybe_whole_expr {
     ($p:expr) => {
         if let token::Interpolated(nt) = $p.token.clone() {
-            match nt.0 {
+            match *nt {
                 token::NtExpr(ref e) | token::NtLiteral(ref e) => {
                     $p.bump();
                     return Ok((*e).clone());

@@ -146,7 +146,7 @@ macro_rules! maybe_whole_expr {
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
         if let token::Interpolated(nt) = $p.token.clone() {
-            if let token::$constructor($x) = nt.0.clone() {
+            if let token::$constructor($x) = (*nt).clone() {
                 $p.bump();
                 return Ok($e);
             }

@@ -1570,7 +1570,7 @@ impl<'a> Parser<'a> {
                 Some(body)
             }
             token::Interpolated(ref nt) => {
-                match &nt.0 {
+                match **nt {
                     token::NtBlock(..) => {
                         *at_end = true;
                         let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;

@@ -1913,7 +1913,7 @@ impl<'a> Parser<'a> {

     fn is_named_argument(&mut self) -> bool {
         let offset = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
             }

@@ -2099,7 +2099,7 @@ impl<'a> Parser<'a> {
     /// Matches `token_lit = LIT_INTEGER | ...`.
     fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
         let out = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
                     ExprKind::Lit(ref lit) => { lit.node.clone() }
                     _ => { return self.unexpected_last(&self.token); }

@@ -2299,7 +2299,7 @@ impl<'a> Parser<'a> {
     /// attributes.
     pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
         let meta_ident = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref meta) => match meta.node {
                     ast::MetaItemKind::Word => Some(meta.ident.clone()),
                     _ => None,

@@ -3271,7 +3271,7 @@ impl<'a> Parser<'a> {
             self.meta_var_span = Some(self.span);
             // Interpolated identifier and lifetime tokens are replaced with usual identifier
             // and lifetime tokens, so the former are never encountered during normal parsing.
-            match nt.0 {
+            match **nt {
                 token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
                 token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
                 _ => return,

@@ -3403,7 +3403,7 @@ impl<'a> Parser<'a> {
             // can't continue an expression after an ident
             token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
             token::Literal(..) | token::Pound => true,
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtIdent(..) | token::NtExpr(..) |
                 token::NtBlock(..) | token::NtPath(..) => true,
                 _ => false,
