Rollup of 14 pull requests #73744

Closed · wants to merge 34 commits

Changes from 1 commit

34 commits
f03cf99
Add a fast path for `std::thread::panicking`.
eduardosm May 26, 2020
e85df08
Don't move cursor in search box when using arrows to navigate search …
carols10cents Jun 2, 2020
560a996
Implement slice_strip feature
tesuji Jun 16, 2020
8a816ca
Cleanup MinGW LLVM linkage workaround
mati865 Jun 19, 2020
9766a93
Document the mod keyword
poliorcetics Jun 22, 2020
3c46e36
Document the mut keyword
poliorcetics Jun 22, 2020
ef24faf
Refactor non_ascii_idents lints, exclude ascii pair for confusable_id…
crlf0710 May 10, 2020
86f6c0e
Record span of `const` kw in GenericParamKind
ayazhafiz Jun 21, 2020
9bb414f
Fix ptr doc warnings.
ehuss Jun 23, 2020
d8ea10c
Document the return keyword
poliorcetics Jun 24, 2020
d36d351
Implement intrinsic
doctorn Jun 23, 2020
c2dfc25
Fix tests
doctorn Jun 24, 2020
4931996
Fix tests
doctorn Jun 24, 2020
771a1d8
Make `std::panicking::panic_count::is_zero` inline and move the slow …
eduardosm Jun 24, 2020
520461f
Provide suggestions for some moved value errors
estebank Jun 20, 2020
3d09017
Add a compile fail example, binding -> variable, apply suggestions
poliorcetics Jun 25, 2020
eb6d9a4
Add E0766 error for unterminated double quote byte string
GuillaumeGomez Jun 21, 2020
33302fa
Update UI test
GuillaumeGomez Jun 21, 2020
25e864e
Implement mixed script confusable lint.
crlf0710 Jun 23, 2020
77b0ed7
proc_macro: Stop flattening groups with dummy spans
petrochenkov Jun 7, 2020
1fbd160
Rollup merge of #72617 - eduardosm:panicking, r=Amanieu
Manishearth Jun 26, 2020
887ce8e
Rollup merge of #72770 - crlf0710:mixed_script_confusable, r=Manishearth
Manishearth Jun 26, 2020
c6683a5
Rollup merge of #72967 - integer32llc:prevent-default-arrows, r=kinnison
Manishearth Jun 26, 2020
8c038b7
Rollup merge of #73102 - petrochenkov:flatgroup, r=Aaron1011
Manishearth Jun 26, 2020
082cdf9
Rollup merge of #73414 - lzutao:slice_strip, r=dtolnay
Manishearth Jun 26, 2020
839bcef
Rollup merge of #73418 - doctorn:variants-intrinsic, r=kennytm
Manishearth Jun 26, 2020
0b67c1f
Rollup merge of #73507 - mati865:cleanup-mingw-llvm-linkage, r=matthe…
Manishearth Jun 26, 2020
0aef6ff
Rollup merge of #73534 - estebank:borrowck-suggestions, r=matthewjasper
Manishearth Jun 26, 2020
3fc8f8e
Rollup merge of #73581 - GuillaumeGomez:add-0766, r=varkor
Manishearth Jun 26, 2020
bab4731
Rollup merge of #73597 - ayazhafiz:i/const-span, r=ecstatic-morse
Manishearth Jun 26, 2020
19f7805
Rollup merge of #73619 - poliorcetics:mod-keyword, r=steveklabnik
Manishearth Jun 26, 2020
2334b93
Rollup merge of #73621 - poliorcetics:mut-keyword, r=steveklabnik
Manishearth Jun 26, 2020
5d4e2e1
Rollup merge of #73648 - poliorcetics:return-keyword, r=joshtriplett
Manishearth Jun 26, 2020
57167a9
Rollup merge of #73673 - ehuss:fix-ptr-docs, r=oli-obk
Manishearth Jun 26, 2020
proc_macro: Stop flattening groups with dummy spans
petrochenkov committed Jun 25, 2020

commit 77b0ed70b3658cb7ba7b18f68519b3baf953213b
2 changes: 1 addition & 1 deletion src/librustc_ast/attr/mod.rs
@@ -475,7 +475,7 @@ impl MetaItem {
let span = span.with_hi(segments.last().unwrap().ident.span.hi());
Path { span, segments }
}
Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
Some(TokenTree::Token(Token { kind: token::Interpolated(nt, _), .. })) => match *nt {
token::Nonterminal::NtMeta(ref item) => return item.meta(item.path.span),
token::Nonterminal::NtPath(ref path) => path.clone(),
_ => return None,
2 changes: 1 addition & 1 deletion src/librustc_ast/mut_visit.rs
@@ -656,7 +656,7 @@ pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
*span = ident.span;
return; // Avoid visiting the span for the second time.
}
token::Interpolated(nt) => {
token::Interpolated(nt, _) => {
let mut nt = Lrc::make_mut(nt);
vis.visit_interpolated(&mut nt);
}
31 changes: 20 additions & 11 deletions src/librustc_ast/token.rs
@@ -182,6 +182,15 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
.contains(&name)
}

/// A hack used to pass AST fragments to attribute and derive macros
/// as a single nonterminal token instead of a token stream.
/// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub enum FlattenGroup {
Yes,
No,
}

#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub enum TokenKind {
/* Expression-operator symbols. */
@@ -236,7 +245,7 @@ pub enum TokenKind {
/// treat regular and interpolated lifetime identifiers in the same way.
Lifetime(Symbol),

Interpolated(Lrc<Nonterminal>),
Interpolated(Lrc<Nonterminal>, FlattenGroup),

// Can be expanded into several tokens.
/// A doc comment.
@@ -343,7 +352,7 @@ impl Token {
/// if they keep spans or perform edition checks.
pub fn uninterpolated_span(&self) -> Span {
match &self.kind {
Interpolated(nt) => nt.span(),
Interpolated(nt, _) => nt.span(),
_ => self.span,
}
}
@@ -382,7 +391,7 @@ impl Token {
ModSep | // global path
Lifetime(..) | // labeled loop
Pound => true, // expression attributes
Interpolated(ref nt) => match **nt {
Interpolated(ref nt, _) => match **nt {
NtLiteral(..) |
NtExpr(..) |
NtBlock(..) |
@@ -408,7 +417,7 @@ impl Token {
Lifetime(..) | // lifetime bound in trait object
Lt | BinOp(Shl) | // associated path
ModSep => true, // global path
Interpolated(ref nt) => match **nt {
Interpolated(ref nt, _) => match **nt {
NtTy(..) | NtPath(..) => true,
_ => false,
},
@@ -420,7 +429,7 @@ impl Token {
pub fn can_begin_const_arg(&self) -> bool {
match self.kind {
OpenDelim(Brace) => true,
Interpolated(ref nt) => match **nt {
Interpolated(ref nt, _) => match **nt {
NtExpr(..) | NtBlock(..) | NtLiteral(..) => true,
_ => false,
},
@@ -455,7 +464,7 @@ impl Token {
match self.uninterpolate().kind {
Literal(..) | BinOp(Minus) => true,
Ident(name, false) if name.is_bool_lit() => true,
Interpolated(ref nt) => match &**nt {
Interpolated(ref nt, _) => match &**nt {
NtLiteral(_) => true,
NtExpr(e) => match &e.kind {
ast::ExprKind::Lit(_) => true,
@@ -476,7 +485,7 @@ impl Token {
// otherwise returns the original token.
pub fn uninterpolate(&self) -> Cow<'_, Token> {
match &self.kind {
Interpolated(nt) => match **nt {
Interpolated(nt, _) => match **nt {
NtIdent(ident, is_raw) => {
Cow::Owned(Token::new(Ident(ident.name, is_raw), ident.span))
}
@@ -523,7 +532,7 @@ impl Token {

/// Returns `true` if the token is an interpolated path.
fn is_path(&self) -> bool {
if let Interpolated(ref nt) = self.kind {
if let Interpolated(ref nt, _) = self.kind {
if let NtPath(..) = **nt {
return true;
}
@@ -535,7 +544,7 @@ impl Token {
/// That is, is this a pre-parsed expression dropped into the token stream
/// (which happens while parsing the result of macro expansion)?
pub fn is_whole_expr(&self) -> bool {
if let Interpolated(ref nt) = self.kind {
if let Interpolated(ref nt, _) = self.kind {
if let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtIdent(..) | NtBlock(_) = **nt {
return true;
}
@@ -546,7 +555,7 @@ impl Token {

// Is the token an interpolated block (`$b:block`)?
pub fn is_whole_block(&self) -> bool {
if let Interpolated(ref nt) = self.kind {
if let Interpolated(ref nt, _) = self.kind {
if let NtBlock(..) = **nt {
return true;
}
@@ -724,7 +733,7 @@ impl Token {
b == d && (a == c || a == kw::DollarCrate || c == kw::DollarCrate)
}

(&Interpolated(_), &Interpolated(_)) => false,
(&Interpolated(..), &Interpolated(..)) => false,

_ => panic!("forgot to add a token?"),
}
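
For reference, the shape of the token.rs change in isolation, as a minimal self-contained Rust sketch. The `FlattenGroup` and `Interpolated` names mirror the diff above, but the type definitions below are simplified stand-ins rather than the real rustc_ast items:

use std::rc::Rc;

// Toy stand-ins for the types touched above (assumed, simplified definitions,
// not the actual compiler ones).
#[derive(Clone, Copy, PartialEq, Debug)]
enum FlattenGroup { Yes, No }

#[derive(Debug)]
enum Nonterminal {
    Expr(String),
    Path(String),
}

#[derive(Debug)]
enum TokenKind {
    Ident(String),
    // Previously `Interpolated(Rc<Nonterminal>)`; the flag now rides along with it.
    Interpolated(Rc<Nonterminal>, FlattenGroup),
}

// Match sites that don't care about the flag simply bind it with `_`,
// which accounts for most of the mechanical changes in this commit.
fn is_whole_expr(kind: &TokenKind) -> bool {
    if let TokenKind::Interpolated(nt, _) = kind {
        matches!(**nt, Nonterminal::Expr(_))
    } else {
        false
    }
}

fn main() {
    let expr = TokenKind::Interpolated(Rc::new(Nonterminal::Expr("1 + 1".into())), FlattenGroup::Yes);
    let path = TokenKind::Interpolated(Rc::new(Nonterminal::Path("std::mem".into())), FlattenGroup::No);
    assert!(is_whole_expr(&expr));
    assert!(!is_whole_expr(&path));
    assert!(!is_whole_expr(&TokenKind::Ident("x".into())));
}
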
2 changes: 1 addition & 1 deletion src/librustc_ast/util/literal.rs
@@ -205,7 +205,7 @@ impl Lit {
token::Lit::new(token::Bool, name, None)
}
token::Literal(lit) => lit,
token::Interpolated(ref nt) => {
token::Interpolated(ref nt, _) => {
if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
if let ast::ExprKind::Lit(lit) = &expr.kind {
return Ok(lit.clone());
2 changes: 1 addition & 1 deletion src/librustc_ast_lowering/lib.rs
@@ -1027,7 +1027,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {

fn lower_token(&mut self, token: Token) -> TokenStream {
match token.kind {
token::Interpolated(nt) => {
token::Interpolated(nt, _) => {
let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
self.lower_token_stream(tts)
}
2 changes: 1 addition & 1 deletion src/librustc_ast_pretty/pprust.rs
@@ -266,7 +266,7 @@ fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>)
token::Shebang(s) => format!("/* shebang: {}*/", s),
token::Unknown(s) => s.to_string(),

token::Interpolated(ref nt) => nonterminal_to_string(nt),
token::Interpolated(ref nt, _) => nonterminal_to_string(nt),
}
}

6 changes: 3 additions & 3 deletions src/librustc_expand/base.rs
@@ -4,7 +4,7 @@ use crate::module::DirectoryOwnership;
use rustc_ast::ast::{self, Attribute, NodeId, PatKind};
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::ptr::P;
use rustc_ast::token;
use rustc_ast::token::{self, FlattenGroup};
use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
use rustc_ast::visit::{AssocCtxt, Visitor};
use rustc_attr::{self as attr, Deprecation, HasAttrs, Stability};
@@ -142,7 +142,7 @@ impl Annotatable {
| Annotatable::StructField(..)
| Annotatable::Variant(..) => panic!("unexpected annotatable"),
};
TokenTree::token(token::Interpolated(Lrc::new(nt)), DUMMY_SP).into()
TokenTree::token(token::Interpolated(Lrc::new(nt), FlattenGroup::Yes), DUMMY_SP).into()
}

pub fn expect_item(self) -> P<ast::Item> {
@@ -374,7 +374,7 @@ where
impl MutVisitor for AvoidInterpolatedIdents {
fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
if let tokenstream::TokenTree::Token(token) = tt {
if let token::Interpolated(nt) = &token.kind {
if let token::Interpolated(nt, _) = &token.kind {
if let token::NtIdent(ident, is_raw) = **nt {
*tt = tokenstream::TokenTree::token(
token::Ident(ident.name, is_raw),
10 changes: 5 additions & 5 deletions src/librustc_expand/mbe/macro_parser.rs
@@ -785,12 +785,12 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
sym::literal => token.can_begin_literal_maybe_minus(),
sym::vis => match token.kind {
// The follow-set of :vis + "priv" keyword + interpolated
token::Comma | token::Ident(..) | token::Interpolated(_) => true,
token::Comma | token::Ident(..) | token::Interpolated(..) => true,
_ => token.can_begin_type(),
},
sym::block => match token.kind {
token::OpenDelim(token::Brace) => true,
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt, _) => match **nt {
token::NtItem(_)
| token::NtPat(_)
| token::NtTy(_)
@@ -804,7 +804,7 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
},
sym::path | sym::meta => match token.kind {
token::ModSep | token::Ident(..) => true,
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt, _) => match **nt {
token::NtPath(_) | token::NtMeta(_) => true,
_ => may_be_ident(&nt),
},
@@ -823,12 +823,12 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
token::ModSep | // path
token::Lt | // path (UFCS constant)
token::BinOp(token::Shl) => true, // path (double UFCS)
token::Interpolated(ref nt) => may_be_ident(nt),
token::Interpolated(ref nt, _) => may_be_ident(nt),
_ => false,
},
sym::lifetime => match token.kind {
token::Lifetime(_) => true,
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt, _) => match **nt {
token::NtLifetime(_) | token::NtTT(_) => true,
_ => false,
},
7 changes: 5 additions & 2 deletions src/librustc_expand/mbe/transcribe.rs
@@ -4,7 +4,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};

use rustc_ast::ast::MacCall;
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::{self, NtTT, Token};
use rustc_ast::token::{self, FlattenGroup, NtTT, Token};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
@@ -240,7 +240,10 @@ pub(super) fn transcribe<'a>(
result.push(tt.clone().into());
} else {
marker.visit_span(&mut sp);
let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
let token = TokenTree::token(
token::Interpolated(nt.clone(), FlattenGroup::No),
sp,
);
result.push(token.into());
}
} else {
4 changes: 2 additions & 2 deletions src/librustc_expand/proc_macro.rs
@@ -2,7 +2,7 @@ use crate::base::{self, *};
use crate::proc_macro_server;

use rustc_ast::ast::{self, ItemKind, MetaItemKind, NestedMetaItem};
use rustc_ast::token;
use rustc_ast::token::{self, FlattenGroup};
use rustc_ast::tokenstream::{self, TokenStream};
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, ErrorReported};
@@ -102,7 +102,7 @@ impl MultiItemModifier for ProcMacroDerive {
}
}

let token = token::Interpolated(Lrc::new(token::NtItem(item)));
let token = token::Interpolated(Lrc::new(token::NtItem(item)), FlattenGroup::Yes);
let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();

let server = proc_macro_server::Rustc::new(ecx);
38 changes: 26 additions & 12 deletions src/librustc_expand/proc_macro_server.rs
@@ -1,7 +1,7 @@
use crate::base::ExtCtxt;

use rustc_ast::ast;
use rustc_ast::token;
use rustc_ast::token::{self, FlattenGroup};
use rustc_ast::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
use rustc_ast::util::comments;
use rustc_ast_pretty::pprust;
@@ -60,7 +60,12 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
let Token { kind, span } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let delimiter = Delimiter::from_internal(delim);
return TokenTree::Group(Group { delimiter, stream: tts, span });
return TokenTree::Group(Group {
delimiter,
stream: tts,
span,
flatten: FlattenGroup::No,
});
}
tokenstream::TokenTree::Token(token) => token,
};
@@ -167,19 +172,21 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
delimiter: Delimiter::Bracket,
stream,
span: DelimSpan::from_single(span),
flatten: FlattenGroup::No,
}));
if style == ast::AttrStyle::Inner {
stack.push(tt!(Punct::new('!', false)));
}
tt!(Punct::new('#', false))
}

Interpolated(nt) => {
Interpolated(nt, flatten) => {
let stream = nt_to_tokenstream(&nt, sess, span);
TokenTree::Group(Group {
delimiter: Delimiter::None,
stream,
span: DelimSpan::from_single(span),
flatten,
})
}

@@ -195,7 +202,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {

let (ch, joint, span) = match self {
TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
TokenTree::Group(Group { delimiter, stream, span }) => {
TokenTree::Group(Group { delimiter, stream, span, .. }) => {
return tokenstream::TokenTree::Delimited(span, delimiter.to_internal(), stream)
.into();
}
@@ -283,6 +290,10 @@ pub struct Group {
delimiter: Delimiter,
stream: TokenStream,
span: DelimSpan,
/// A hack used to pass AST fragments to attribute and derive macros
/// as a single nonterminal token instead of a token stream.
/// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
flatten: FlattenGroup,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
@@ -437,14 +448,12 @@ impl server::TokenStreamIter for Rustc<'_> {
let next = iter.cursor.next_with_joint()?;
Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
})?;
// HACK: The condition "dummy span + group with empty delimiter" represents an AST
// fragment approximately converted into a token stream. This may happen, for
// example, with inputs to proc macro attributes, including derives. Such "groups"
// need to flattened during iteration over stream's token trees.
// Eventually this needs to be removed in favor of keeping original token trees
// and not doing the roundtrip through AST.
// A hack used to pass AST fragments to attribute and derive macros
// as a single nonterminal token instead of a token stream.
// Such token needs to be "unwrapped" and not represented as a delimited group.
// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
if let TokenTree::Group(ref group) = tree {
if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
if matches!(group.flatten, FlattenGroup::Yes) {
iter.cursor.append(group.stream.clone());
continue;
}
@@ -456,7 +465,12 @@ impl server::TokenStreamIter for Rustc<'_> {

impl server::Group for Rustc<'_> {
fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
Group { delimiter, stream, span: DelimSpan::from_single(server::Span::call_site(self)) }
Group {
delimiter,
stream,
span: DelimSpan::from_single(server::Span::call_site(self)),
flatten: FlattenGroup::No,
}
}
fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
group.delimiter
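
For reference, the behavioral core of the proc_macro_server change is the flattening test in the token-stream iterator: it now consults an explicit `flatten` flag carried on the group instead of inferring flattening from a delimiter-less group with a dummy span. A minimal self-contained sketch under the same caveat (the `Group` and `flatten` names follow the diff; everything else is a simplified stand-in):

// Standalone sketch of the new flattening decision (toy Group/TokenStream types).
#[derive(Clone, Copy, PartialEq, Debug)]
enum FlattenGroup { Yes, No }

#[derive(Clone, Debug)]
struct Group {
    stream: Vec<&'static str>, // stand-in for a TokenStream
    flatten: FlattenGroup,     // set from the `Interpolated(nt, flatten)` token that produced it
}

// Old behavior (removed): flatten when the group had no delimiter and a dummy span.
// New behavior (this commit): flatten only when the group is explicitly marked.
fn flatten_into(out: &mut Vec<&'static str>, group: &Group) {
    if matches!(group.flatten, FlattenGroup::Yes) {
        out.extend(group.stream.iter().copied()); // splice the inner tokens directly
    } else {
        out.push("(group)"); // keep it as a single delimited group
    }
}

fn main() {
    let mut out = Vec::new();
    flatten_into(&mut out, &Group { stream: vec!["a", "+", "b"], flatten: FlattenGroup::Yes });
    flatten_into(&mut out, &Group { stream: vec!["c"], flatten: FlattenGroup::No });
    assert_eq!(out, vec!["a", "+", "b", "(group)"]);
}
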
4 changes: 2 additions & 2 deletions src/librustc_parse/parser/attr.rs
@@ -155,7 +155,7 @@ impl<'a> Parser<'a> {
/// The delimiters or `=` are still put into the resulting token stream.
pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> {
let item = match self.token.kind {
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt, _) => match **nt {
Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
_ => None,
},
@@ -254,7 +254,7 @@ impl<'a> Parser<'a> {
/// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
let nt_meta = match self.token.kind {
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt, _) => match **nt {
token::NtMeta(ref e) => Some(e.clone()),
_ => None,
},
2 changes: 1 addition & 1 deletion src/librustc_parse/parser/expr.rs
@@ -26,7 +26,7 @@ use std::mem;
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
($p:expr) => {
if let token::Interpolated(nt) = &$p.token.kind {
if let token::Interpolated(nt, _) = &$p.token.kind {
match &**nt {
token::NtExpr(e) | token::NtLiteral(e) => {
let e = e.clone();
2 changes: 1 addition & 1 deletion src/librustc_parse/parser/item.rs
@@ -1780,7 +1780,7 @@ impl<'a> Parser<'a> {

fn is_named_param(&self) -> bool {
let offset = match self.token.kind {
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt, _) => match **nt {
token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
_ => 0,
},
6 changes: 3 additions & 3 deletions src/librustc_parse/parser/mod.rs
@@ -54,7 +54,7 @@ enum BlockMode {
#[macro_export]
macro_rules! maybe_whole {
($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
if let token::Interpolated(nt) = &$p.token.kind {
if let token::Interpolated(nt, _) = &$p.token.kind {
if let token::$constructor(x) = &**nt {
let $x = x.clone();
$p.bump();
@@ -69,7 +69,7 @@ macro_rules! maybe_whole {
macro_rules! maybe_recover_from_interpolated_ty_qpath {
($self: expr, $allow_qpath_recovery: expr) => {
if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
if let token::Interpolated(nt) = &$self.token.kind {
if let token::Interpolated(nt, _) = &$self.token.kind {
if let token::NtTy(ty) = &**nt {
let ty = ty.clone();
$self.bump();
@@ -922,7 +922,7 @@ impl<'a> Parser<'a> {
if self.eat(&token::Eq) {
let eq_span = self.prev_token.span;
let mut is_interpolated_expr = false;
if let token::Interpolated(nt) = &self.token.kind {
if let token::Interpolated(nt, _) = &self.token.kind {
if let token::NtExpr(..) = **nt {
is_interpolated_expr = true;
}
2 changes: 1 addition & 1 deletion src/librustc_parse/parser/pat.rs
@@ -515,7 +515,7 @@ impl<'a> Parser<'a> {
self.recover_additional_muts();

// Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
if let token::Interpolated(ref nt) = self.token.kind {
if let token::Interpolated(ref nt, _) = self.token.kind {
if let token::NtPat(_) = **nt {
self.expected_ident_found().emit();
}
2 changes: 1 addition & 1 deletion src/librustc_resolve/build_reduced_graph.rs
@@ -1325,7 +1325,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
}

fn visit_token(&mut self, t: Token) {
if let token::Interpolated(nt) = t.kind {
if let token::Interpolated(nt, _) = t.kind {
if let token::NtExpr(ref expr) = *nt {
if let ast::ExprKind::MacCall(..) = expr.kind {
self.visit_invoc(expr.id);
2 changes: 1 addition & 1 deletion src/librustc_resolve/def_collector.rs
@@ -256,7 +256,7 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> {
}

fn visit_token(&mut self, t: Token) {
if let token::Interpolated(nt) = t.kind {
if let token::Interpolated(nt, _) = t.kind {
if let token::NtExpr(ref expr) = *nt {
if let ExprKind::MacCall(..) = expr.kind {
self.visit_macro_invoc(expr.id);