Skip to content

Commit c901b42

Browse files
committed
Refactor token collection to capture trailing token immediately
1 parent b2e55bf commit c901b42

File tree

5 files changed: +124 −118 lines changed

compiler/rustc_ast/src/tokenstream.rs

-11
Original file line numberDiff line numberDiff line change
@@ -127,14 +127,10 @@ where
127127
}
128128

129129
pub trait CreateTokenStream: sync::Send + sync::Sync {
130-
fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream>;
131130
fn create_token_stream(&self) -> TokenStream;
132131
}
133132

134133
impl CreateTokenStream for TokenStream {
135-
fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
136-
panic!("Cannot call `add_trailing_semi` on a `TokenStream`!");
137-
}
138134
fn create_token_stream(&self) -> TokenStream {
139135
self.clone()
140136
}
@@ -151,13 +147,6 @@ impl LazyTokenStream {
151147
LazyTokenStream(Lrc::new(Box::new(inner)))
152148
}
153149

154-
/// Extends the captured stream by one token,
155-
/// which must be a trailing semicolon. This
156-
/// affects the `TokenStream` created by `make_tokenstream`.
157-
pub fn add_trailing_semi(&self) -> LazyTokenStream {
158-
LazyTokenStream(Lrc::new(self.0.add_trailing_semi()))
159-
}
160-
161150
pub fn create_token_stream(&self) -> TokenStream {
162151
self.0.create_token_stream()
163152
}

compiler/rustc_parse/src/lib.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -292,7 +292,7 @@ pub fn nt_to_tokenstream(
292292
} else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
293293
return fake_token_stream(sess, nt);
294294
} else {
295-
panic!("Missing tokens for nt {:?}", pprust::nonterminal_to_string(nt));
295+
panic!("Missing tokens for nt at {:?}: {:?}", nt.span(), pprust::nonterminal_to_string(nt));
296296
}
297297
}
298298

compiler/rustc_parse/src/parser/item.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use super::diagnostics::{dummy_arg, ConsumeClosingDelim, Error};
22
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
3-
use super::{FollowedByType, ForceCollect, Parser, PathStyle};
3+
use super::{FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
44

55
use crate::{maybe_collect_tokens, maybe_whole};
66

@@ -125,7 +125,7 @@ impl<'a> Parser<'a> {
125125
let item = maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Self| {
126126
let item = this.parse_item_common_(attrs, mac_allowed, attrs_allowed, req_name);
127127
unclosed_delims.append(&mut this.unclosed_delims);
128-
item
128+
Ok((item?, TrailingToken::None))
129129
})?;
130130

131131
self.unclosed_delims.append(&mut unclosed_delims);

compiler/rustc_parse/src/parser/mod.rs

+28-27
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,11 @@ pub enum ForceCollect {
6161
No,
6262
}
6363

64+
pub enum TrailingToken {
65+
None,
66+
Semi,
67+
}
68+
6469
/// Like `maybe_whole_expr`, but for things other than expressions.
6570
#[macro_export]
6671
macro_rules! maybe_whole {
@@ -1225,6 +1230,13 @@ impl<'a> Parser<'a> {
12251230
}
12261231
}
12271232

1233+
pub fn collect_tokens<R: HasTokens>(
1234+
&mut self,
1235+
f: impl FnOnce(&mut Self) -> PResult<'a, R>,
1236+
) -> PResult<'a, R> {
1237+
self.collect_tokens_trailing_token(|this| Ok((f(this)?, TrailingToken::None)))
1238+
}
1239+
12281240
/// Records all tokens consumed by the provided callback,
12291241
/// including the current token. These tokens are collected
12301242
/// into a `LazyTokenStream`, and returned along with the result
@@ -1241,9 +1253,9 @@ impl<'a> Parser<'a> {
12411253
/// This restriction shouldn't be an issue in practice,
12421254
/// since this function is used to record the tokens for
12431255
/// a parsed AST item, which always has matching delimiters.
1244-
pub fn collect_tokens<R: HasTokens>(
1256+
pub fn collect_tokens_trailing_token<R: HasTokens>(
12451257
&mut self,
1246-
f: impl FnOnce(&mut Self) -> PResult<'a, R>,
1258+
f: impl FnOnce(&mut Self) -> PResult<'a, (R, TrailingToken)>,
12471259
) -> PResult<'a, R> {
12481260
let start_token = (self.token.clone(), self.token_spacing);
12491261
let cursor_snapshot = TokenCursor {
@@ -1256,7 +1268,7 @@ impl<'a> Parser<'a> {
12561268
append_unglued_token: self.token_cursor.append_unglued_token.clone(),
12571269
};
12581270

1259-
let mut ret = f(self)?;
1271+
let (mut ret, trailing_token) = f(self)?;
12601272

12611273
// Produces a `TokenStream` on-demand. Using `cursor_snapshot`
12621274
// and `num_calls`, we can reconstruct the `TokenStream` seen
@@ -1275,55 +1287,44 @@ impl<'a> Parser<'a> {
12751287
cursor_snapshot: TokenCursor,
12761288
num_calls: usize,
12771289
desugar_doc_comments: bool,
1278-
trailing_semi: bool,
12791290
append_unglued_token: Option<TreeAndSpacing>,
12801291
}
12811292
impl CreateTokenStream for LazyTokenStreamImpl {
12821293
fn create_token_stream(&self) -> TokenStream {
1283-
let mut num_calls = self.num_calls;
1284-
if self.trailing_semi {
1285-
num_calls += 1;
1286-
}
12871294
// The token produced by the final call to `next` or `next_desugared`
12881295
// was not actually consumed by the callback. The combination
12891296
// of chaining the initial token and using `take` produces the desired
12901297
// result - we produce an empty `TokenStream` if no calls were made,
12911298
// and omit the final token otherwise.
12921299
let mut cursor_snapshot = self.cursor_snapshot.clone();
12931300
let tokens = std::iter::once(self.start_token.clone())
1294-
.chain((0..num_calls).map(|_| {
1301+
.chain((0..self.num_calls).map(|_| {
12951302
if self.desugar_doc_comments {
12961303
cursor_snapshot.next_desugared()
12971304
} else {
12981305
cursor_snapshot.next()
12991306
}
13001307
}))
1301-
.take(num_calls);
1308+
.take(self.num_calls);
13021309

13031310
make_token_stream(tokens, self.append_unglued_token.clone())
13041311
}
1305-
fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
1306-
if self.trailing_semi {
1307-
panic!("Called `add_trailing_semi` twice!");
1308-
}
1309-
if self.append_unglued_token.is_some() {
1310-
panic!(
1311-
"Cannot call `add_trailing_semi` when we have an unglued token {:?}",
1312-
self.append_unglued_token
1313-
);
1314-
}
1315-
let mut new = self.clone();
1316-
new.trailing_semi = true;
1317-
Box::new(new)
1312+
}
1313+
1314+
let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
1315+
match trailing_token {
1316+
TrailingToken::None => {}
1317+
TrailingToken::Semi => {
1318+
assert_eq!(self.token.kind, token::Semi);
1319+
num_calls += 1;
13181320
}
13191321
}
13201322

13211323
let lazy_impl = LazyTokenStreamImpl {
13221324
start_token,
1323-
num_calls: self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls,
1325+
num_calls,
13241326
cursor_snapshot,
13251327
desugar_doc_comments: self.desugar_doc_comments,
1326-
trailing_semi: false,
13271328
append_unglued_token: self.token_cursor.append_unglued_token.clone(),
13281329
};
13291330
ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
@@ -1427,9 +1428,9 @@ macro_rules! maybe_collect_tokens {
14271428
if matches!($force_collect, ForceCollect::Yes)
14281429
|| $crate::parser::attr::maybe_needs_tokens($attrs)
14291430
{
1430-
$self.collect_tokens($f)
1431+
$self.collect_tokens_trailing_token($f)
14311432
} else {
1432-
$f($self)
1433+
Ok($f($self)?.0)
14331434
}
14341435
};
14351436
}

0 commit comments

Comments (0)