From 67f4e3cbaafd535942abadd6b41f6a05f3908600 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 15 Aug 2023 15:47:58 +1000 Subject: [PATCH] Remove NtIdent/NtLifetime/Nonterminal/Token::Interpolated. Remnants of `Token::Interpolated` still exist in the form of the new `token::InterpolatedIdent` and `token::InterpolatedLifetime` tokens. I represented them like that because there's a lot of code that assumes an interpolated ident/lifetime fits in a single token, and changing all that code to work with invisible delimiters would have been a pain. (Maybe it could be done in a follow-up.) Fully kills off the "captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens" restriction. --- compiler/rustc_ast/src/ast_traits.rs | 14 -- compiler/rustc_ast/src/mut_visit.rs | 39 +--- compiler/rustc_ast/src/token.rs | 184 +++++++----------- compiler/rustc_ast/src/tokenstream.rs | 49 +++-- compiler/rustc_ast_pretty/src/pprust/mod.rs | 6 +- compiler/rustc_ast_pretty/src/pprust/state.rs | 15 +- compiler/rustc_builtin_macros/src/cfg_eval.rs | 10 +- compiler/rustc_expand/src/config.rs | 13 +- compiler/rustc_expand/src/mbe/diagnostics.rs | 14 +- compiler/rustc_expand/src/mbe/transcribe.rs | 17 +- .../rustc_expand/src/proc_macro_server.rs | 28 +-- .../rustc_parse/src/parser/attr_wrapper.rs | 2 +- compiler/rustc_parse/src/parser/expr.rs | 16 +- compiler/rustc_parse/src/parser/mod.rs | 34 ++-- .../rustc_parse/src/parser/nonterminal.rs | 158 ++++++--------- compiler/rustc_parse/src/parser/ty.rs | 2 +- 16 files changed, 231 insertions(+), 370 deletions(-) diff --git a/compiler/rustc_ast/src/ast_traits.rs b/compiler/rustc_ast/src/ast_traits.rs index 8a7783e30f8e9..060f643bc8d83 100644 --- a/compiler/rustc_ast/src/ast_traits.rs +++ b/compiler/rustc_ast/src/ast_traits.rs @@ -3,7 +3,6 @@ //! The traits are not implemented exhaustively, only when actually necessary. use crate::ptr::P; -use crate::token::Nonterminal; use crate::tokenstream::LazyAttrTokenStream; use crate::{Arm, Crate, ExprField, FieldDef, GenericParam, Param, PatField, Variant}; use crate::{AssocItem, Expr, ForeignItem, Item, NodeId}; @@ -228,19 +227,6 @@ impl HasTokens for Attribute { } } -impl HasTokens for Nonterminal { - fn tokens(&self) -> Option<&LazyAttrTokenStream> { - match self { - Nonterminal::NtIdent(..) | Nonterminal::NtLifetime(..) => None, - } - } - fn tokens_mut(&mut self) -> Option<&mut Option> { - match self { - Nonterminal::NtIdent(..) | Nonterminal::NtLifetime(..) => None, - } - } -} - /// A trait for AST nodes having (or not having) attributes. pub trait HasAttrs { /// This is `true` if this `HasAttrs` might support 'custom' (proc-macro) inner diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index 2980187722aff..f823194265e8a 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -755,45 +755,18 @@ pub fn visit_token(t: &mut Token, vis: &mut T) { *span = ident.span; return; // Avoid visiting the span for the second time. } - token::Interpolated(nt) => { - visit_nonterminal(Lrc::make_mut(nt), vis); + token::InterpolatedIdent(name, _, uninterpolated_span) + | token::InterpolatedLifetime(name, uninterpolated_span) => { + let mut ident = Ident::new(*name, *uninterpolated_span); + vis.visit_ident(&mut ident); + *name = ident.name; + *uninterpolated_span = ident.span; } _ => {} } vis.visit_span(span); } -// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
-/// Applies the visitor to elements of interpolated nodes. -// -// N.B., this can occur only when applying a visitor to partially expanded -// code, where parsed pieces have gotten implanted ito *other* macro -// invocations. This is relevant for macro hygiene, but possibly not elsewhere. -// -// One problem here occurs because the types for flat_map_item, flat_map_stmt, -// etc., allow the visitor to return *multiple* items; this is a problem for the -// nodes here, because they insist on having exactly one piece. One solution -// would be to mangle the MutVisitor trait to include one-to-many and -// one-to-one versions of these entry points, but that would probably confuse a -// lot of people and help very few. Instead, I'm just going to put in dynamic -// checks. I think the performance impact of this will be pretty much -// nonexistent. The danger is that someone will apply a `MutVisitor` to a -// partially expanded node, and will be confused by the fact that their -// `flat_map_item` or `flat_map_stmt` isn't getting called on `NtItem` or `NtStmt` -// nodes. Hopefully they'll wind up reading this comment, and doing something -// appropriate. -// -// BTW, design choice: I considered just changing the type of, e.g., `NtItem` to -// contain multiple items, but decided against it when I looked at -// `parse_item_or_view_item` and tried to figure out what I would do with -// multiple items there.... -pub fn visit_nonterminal(nt: &mut token::Nonterminal, vis: &mut T) { - match nt { - token::NtIdent(ident, _is_raw) => vis.visit_ident(ident), - token::NtLifetime(ident) => vis.visit_ident(ident), - } -} - // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. pub fn visit_defaultness(defaultness: &mut Defaultness, vis: &mut T) { match defaultness { diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index f9977ae2ee1b9..0b5637fd0b87b 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -1,13 +1,10 @@ pub use BinOpToken::*; pub use LitKind::*; -pub use Nonterminal::*; pub use TokenKind::*; use crate::ast; use crate::util::case::Case; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; -use rustc_data_structures::sync::Lrc; use rustc_macros::HashStable_Generic; use rustc_span::symbol::{kw, sym}; #[allow(hidden_glob_reexports)] @@ -262,9 +259,7 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool { .contains(&name) } -// SAFETY: due to the `Clone` impl below, all fields of all variants other than -// `Interpolated` must impl `Copy`. -#[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] pub enum TokenKind { /* Expression-operator symbols. */ Eq, @@ -308,26 +303,23 @@ pub enum TokenKind { Literal(Lit), /// Identifier token. - /// Do not forget about `NtIdent` when you want to match on identifiers. + /// Do not forget about `InterpolatedIdent` when you want to match on identifiers. /// It's recommended to use `Token::(ident,uninterpolate,uninterpolated_span)` to /// treat regular and interpolated identifiers in the same way. Ident(Symbol, /* is_raw */ bool), + /// This `Span` is the span of the original identifier passed to the + /// declarative macro. The span in the `Token` is the span of the `ident` + /// metavariable in the macro's RHS. + InterpolatedIdent(Symbol, /* is_raw */ bool, Span), /// Lifetime identifier token. 
- /// Do not forget about `NtLifetime` when you want to match on lifetime identifiers. + /// Do not forget about `InterpolatedLifetime` when you want to match on lifetime identifiers. /// It's recommended to use `Token::(lifetime,uninterpolate,uninterpolated_span)` to /// treat regular and interpolated lifetime identifiers in the same way. Lifetime(Symbol), - - /// An embedded AST node, as produced by a macro. This only exists for - /// historical reasons. We'd like to get rid of it, for multiple reasons. - /// - It's conceptually very strange. Saying a token can contain an AST - /// node is like saying, in natural language, that a word can contain a - /// sentence. - /// - It requires special handling in a bunch of places in the parser. - /// - It prevents `Token` from implementing `Copy`. - /// It adds complexity and likely slows things down. Please don't add new - /// occurrences of this token kind! - Interpolated(Lrc), + /// This `Span` is the span of the original lifetime passed to the + /// declarative macro. The span in the `Token` is the span of the + /// `lifetime` metavariable in the macro's RHS. + InterpolatedLifetime(Symbol, Span), /// A doc comment token. /// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc) @@ -337,19 +329,6 @@ pub enum TokenKind { Eof, } -impl Clone for TokenKind { - fn clone(&self) -> Self { - // `TokenKind` would impl `Copy` if it weren't for `Interpolated`. So - // for all other variants, this implementation of `clone` is just like - // a copy. This is faster than the `derive(Clone)` version which has a - // separate path for every variant. - match self { - Interpolated(nt) => Interpolated(nt.clone()), - _ => unsafe { std::ptr::read(self) }, - } - } -} - #[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] pub struct Token { pub kind: TokenKind, @@ -433,8 +412,12 @@ impl Token { /// Note that keywords are also identifiers, so they should use this /// if they keep spans or perform edition checks. pub fn uninterpolated_span(&self) -> Span { - match &self.kind { - Interpolated(nt) => nt.span(), + match self.kind { + InterpolatedIdent(_, _, uninterpolated_span) + | InterpolatedLifetime(_, uninterpolated_span) => uninterpolated_span, + OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(kind))) => { + panic!("njn: uninterpolated_span {kind:?}"); + } _ => self.span, } } @@ -449,8 +432,15 @@ impl Token { | BinOpEq(_) | At | Dot | DotDot | DotDotDot | DotDotEq | Comma | Semi | Colon | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | SingleQuote => true, - OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..) - | Lifetime(..) | Interpolated(..) | Eof => false, + OpenDelim(..) + | CloseDelim(..) + | Literal(..) + | DocComment(..) + | Ident(..) + | InterpolatedIdent(..) + | Lifetime(..) + | InterpolatedLifetime(..) + | Eof => false, } } @@ -612,13 +602,13 @@ impl Token { /// into the regular identifier or lifetime token it refers to, /// otherwise returns the original token.
pub fn uninterpolate(&self) -> Cow<'_, Token> { - match &self.kind { - Interpolated(nt) => match **nt { - NtIdent(ident, is_raw) => { - Cow::Owned(Token::new(Ident(ident.name, is_raw), ident.span)) - } - NtLifetime(ident) => Cow::Owned(Token::new(Lifetime(ident.name), ident.span)), - }, + match self.kind { + InterpolatedIdent(name, is_raw, uninterpolated_span) => { + Cow::Owned(Token::new(Ident(name, is_raw), uninterpolated_span)) + } + InterpolatedLifetime(name, uninterpolated_span) => { + Cow::Owned(Token::new(Lifetime(name), uninterpolated_span)) + } _ => Cow::Borrowed(self), } } @@ -627,12 +617,11 @@ impl Token { #[inline] pub fn ident(&self) -> Option<(Ident, /* is_raw */ bool)> { // We avoid using `Token::uninterpolate` here because it's slow. - match &self.kind { - &Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)), - Interpolated(nt) => match **nt { - NtIdent(ident, is_raw) => Some((ident, is_raw)), - _ => None, - }, + match self.kind { + Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)), + InterpolatedIdent(name, is_raw, uninterpolated_span) => { + Some((Ident::new(name, uninterpolated_span), is_raw)) + } _ => None, } } @@ -641,12 +630,11 @@ impl Token { #[inline] pub fn lifetime(&self) -> Option { // We avoid using `Token::uninterpolate` here because it's slow. - match &self.kind { - &Lifetime(name) => Some(Ident::new(name, self.span)), - Interpolated(nt) => match **nt { - NtLifetime(ident) => Some(ident), - _ => None, - }, + match self.kind { + Lifetime(name) => Some(Ident::new(name, self.span)), + InterpolatedLifetime(name, uninterpolated_span) => { + Some(Ident::new(name, uninterpolated_span)) + } _ => None, } } @@ -822,10 +810,35 @@ impl Token { _ => return None, }, - Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot - | DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar - | Question | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) - | Lifetime(..) | Interpolated(..) | DocComment(..) | Eof => return None, + Le + | EqEq + | Ne + | Ge + | AndAnd + | OrOr + | Tilde + | BinOpEq(..) + | At + | DotDotDot + | DotDotEq + | Comma + | Semi + | ModSep + | RArrow + | LArrow + | FatArrow + | Pound + | Dollar + | Question + | OpenDelim(..) + | CloseDelim(..) + | Literal(..) + | Ident(..) + | InterpolatedIdent(..) + | Lifetime(..) + | InterpolatedLifetime(..) + | DocComment(..) + | Eof => return None, }; Some(Token::new(kind, self.span.to(joint.span))) @@ -839,13 +852,8 @@ impl PartialEq for Token { } } -#[derive(Clone, Encodable, Decodable)] -/// For interpolation during macro expansion. -pub enum Nonterminal { - NtIdent(Ident, /* is_raw */ bool), - NtLifetime(Ident), -} - +// njn: introduce cut-back version lacking Ident/Lifetime? +// - could that simplify the Pat cases too? 
#[derive(Debug, Copy, Clone, PartialEq, Eq, Encodable, Decodable, Hash, HashStable_Generic)] pub enum NonterminalKind { Item, @@ -859,6 +867,7 @@ pub enum NonterminalKind { PatWithOr, Expr, Ty, + //njn: explain how these are never put in Invisible delims Ident, Lifetime, Literal, @@ -924,48 +933,6 @@ impl fmt::Display for NonterminalKind { } } -impl Nonterminal { - pub fn span(&self) -> Span { - match self { - NtIdent(ident, _) | NtLifetime(ident) => ident.span, - } - } -} - -impl PartialEq for Nonterminal { - fn eq(&self, rhs: &Self) -> bool { - match (self, rhs) { - (NtIdent(ident_lhs, is_raw_lhs), NtIdent(ident_rhs, is_raw_rhs)) => { - ident_lhs == ident_rhs && is_raw_lhs == is_raw_rhs - } - (NtLifetime(ident_lhs), NtLifetime(ident_rhs)) => ident_lhs == ident_rhs, - // FIXME: Assume that all "complex" nonterminal are not equal, we can't compare them - // correctly based on data from AST. This will prevent them from matching each other - // in macros. The comparison will become possible only when each nonterminal has an - // attached token stream from which it was parsed. - _ => false, - } - } -} - -impl fmt::Debug for Nonterminal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - NtIdent(..) => f.pad("NtIdent(..)"), - NtLifetime(..) => f.pad("NtLifetime(..)"), - } - } -} - -impl HashStable for Nonterminal -where - CTX: crate::HashStableContext, -{ - fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) { - panic!("interpolated tokens should not be present in the HIR") - } -} - // Some types are used a lot. Make sure they don't unintentionally get bigger. #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] mod size_asserts { @@ -974,7 +941,6 @@ mod size_asserts { // tidy-alphabetical-start static_assert_size!(Lit, 12); static_assert_size!(LitKind, 2); - static_assert_size!(Nonterminal, 16); static_assert_size!(Token, 24); static_assert_size!(TokenKind, 16); // tidy-alphabetical-end diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index c116e96728851..5b6def26d152a 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -15,7 +15,7 @@ use crate::ast::AttrStyle; use crate::ast_traits::{HasAttrs, HasSpan, HasTokens}; -use crate::token::{self, Delimiter, InvisibleSource, Nonterminal, Token, TokenKind}; +use crate::token::{self, Delimiter, InvisibleSource, Token, TokenKind}; use crate::AttrVec; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; @@ -299,11 +299,6 @@ pub struct AttributesData { } /// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s. -/// -/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s -/// instead of a representation of the abstract syntax tree. -/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for -/// backwards compatibility. 
#[derive(Clone, Debug, Default, Encodable, Decodable)] pub struct TokenStream(pub(crate) Lrc>); @@ -444,7 +439,6 @@ impl TokenStream { let Some(tokens) = node.tokens() else { panic!("missing tokens for node at {:?}: {:?}", node.span(), node); }; - //eprintln!("from_ast: {tokens:#?}"); let attrs = node.attrs(); let attr_stream = if attrs.is_empty() { tokens.to_attr_token_stream() @@ -456,26 +450,16 @@ impl TokenStream { attr_stream.to_tokenstream() } - pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream { - match nt { - Nonterminal::NtIdent(ident, is_raw) => { - TokenStream::token_alone(token::Ident(ident.name, *is_raw), ident.span) - } - Nonterminal::NtLifetime(ident) => { - TokenStream::token_alone(token::Lifetime(ident.name), ident.span) - } - } - } - fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree { - match &token.kind { - token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = **nt => { - TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing) - } - token::Interpolated(nt) => TokenTree::Delimited( + match token.kind { + token::InterpolatedIdent(name, is_raw, uninterpolated_span) => TokenTree::Token( + Token::new(token::Ident(name, is_raw), uninterpolated_span), + spacing, + ), + token::InterpolatedLifetime(name, uninterpolated_span) => TokenTree::Delimited( DelimSpan::from_single(token.span), Delimiter::Invisible(InvisibleSource::FlattenToken), - TokenStream::from_nonterminal_ast(nt).flattened(), + TokenStream::token_alone(token::Lifetime(name), uninterpolated_span), ), _ => TokenTree::Token(token.clone(), spacing), } @@ -494,7 +478,10 @@ impl TokenStream { pub fn flattened(&self) -> TokenStream { fn can_skip(stream: &TokenStream) -> bool { stream.trees().all(|tree| match tree { - TokenTree::Token(token, _) => !matches!(token.kind, token::Interpolated(_)), + TokenTree::Token(token, _) => !matches!( + token.kind, + token::InterpolatedIdent(..) | token::InterpolatedLifetime(..) + ), TokenTree::Delimited(_, _, inner) => can_skip(inner), }) } @@ -702,6 +689,18 @@ impl TokenTreeCursor { pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> { self.stream.0.get(self.index + n) } + + // Computes the span for the entire stream. + pub fn span(&self) -> Span { + if self.stream.is_empty() { + DUMMY_SP + } else { + // Unwrapping safe because we checked for emptiness above. + let lo = self.stream.0.first().unwrap().span(); + let hi = self.stream.0.last().unwrap().span(); + lo.to(hi) + } + } } #[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)] diff --git a/compiler/rustc_ast_pretty/src/pprust/mod.rs b/compiler/rustc_ast_pretty/src/pprust/mod.rs index 83b7e13905aee..0ee7e9c08d362 100644 --- a/compiler/rustc_ast_pretty/src/pprust/mod.rs +++ b/compiler/rustc_ast_pretty/src/pprust/mod.rs @@ -5,15 +5,11 @@ pub mod state; pub use state::{print_crate, AnnNode, Comments, PpAnn, PrintState, State}; use rustc_ast as ast; -use rustc_ast::token::{Nonterminal, Token, TokenKind}; +use rustc_ast::token::{Token, TokenKind}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; use std::borrow::Cow; -pub fn nonterminal_to_string(nt: &Nonterminal) -> String { - State::new().nonterminal_to_string(nt) -} - /// Print the token kind precisely, without converting `$crate` into its respective crate name. 
pub fn token_kind_to_string(tok: &TokenKind) -> Cow<'static, str> { State::new().token_kind_to_string(tok) diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 7a330ab8ea022..054f987c19b44 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -6,7 +6,7 @@ use crate::pp::Breaks::{Consistent, Inconsistent}; use crate::pp::{self, Breaks}; use rustc_ast::attr::AttrIdGenerator; use rustc_ast::ptr::P; -use rustc_ast::token::{self, BinOpToken, CommentKind, Delimiter, Nonterminal, Token, TokenKind}; +use rustc_ast::token::{self, BinOpToken, CommentKind, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::util::classify; use rustc_ast::util::comments::{gather_comments, Comment, CommentStyle}; @@ -724,13 +724,6 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere } } - fn nonterminal_to_string(&self, nt: &Nonterminal) -> String { - match nt { - token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(*e, *is_raw).to_string(), - token::NtLifetime(e) => e.to_string(), - } - } - /// Print the token kind precisely, without converting `$crate` into its respective crate name. fn token_kind_to_string(&self, tok: &TokenKind) -> Cow<'static, str> { self.token_kind_to_string_ext(tok, None) @@ -786,18 +779,16 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere token::Literal(lit) => literal_to_string(lit).into(), /* Name components */ - token::Ident(s, is_raw) => { + token::Ident(s, is_raw) | token::InterpolatedIdent(s, is_raw, _) => { IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string().into() } - token::Lifetime(s) => s.to_string().into(), + token::Lifetime(s) | token::InterpolatedLifetime(s, _) => s.to_string().into(), /* Other */ token::DocComment(comment_kind, attr_style, data) => { doc_comment_to_string(comment_kind, attr_style, data).into() } token::Eof => "".into(), - - token::Interpolated(ref nt) => self.nonterminal_to_string(nt).into(), } } diff --git a/compiler/rustc_builtin_macros/src/cfg_eval.rs b/compiler/rustc_builtin_macros/src/cfg_eval.rs index f826c6e7712d2..89dc06bc0f793 100644 --- a/compiler/rustc_builtin_macros/src/cfg_eval.rs +++ b/compiler/rustc_builtin_macros/src/cfg_eval.rs @@ -177,16 +177,14 @@ impl CfgEval<'_, '_> { _ => unreachable!(), }; - // 'Flatten' all nonterminals (i.e. `TokenKind::Interpolated`) - // to `None`-delimited groups containing the corresponding tokens. This - // is normally delayed until the proc-macro server actually needs to - // provide a `TokenKind::Interpolated` to a proc-macro. We do this earlier, + // Flatten interpolated tokens + // (`TokenKind::Interpolated{Ident,Lifetime}`) appropriately. This is + // normally delayed until the proc-macro server actually needs to + // provide an interpolated token to a proc-macro. We do this earlier, // so that we can handle cases like: - // // ```rust // #[cfg_eval] #[cfg] $item //``` - // // where `$item` is `#[cfg_attr] struct Foo {}`. We want to make // sure to evaluate *all* `#[cfg]` and `#[cfg_attr]` attributes - the simplest // way to do this is to do a single parse of a stream without any nonterminals. 
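[Editor's note: the sketch below is illustrative and not part of the patch. It restates, using only names that appear in this diff (`TokenStream::flatten_token` in `tokenstream.rs` above and the proc-macro server hunks below), what the flattening described in the `cfg_eval` comment produces for the two replacement token kinds; it is conceptual notation rather than compilable code.]
// `$x:ident` captured by a declarative macro reaches flattening as
//     TokenKind::InterpolatedIdent(name, is_raw, uninterpolated_span)
// and becomes a plain identifier token carrying the uninterpolated span:
//     TokenTree::Token(Token::new(token::Ident(name, is_raw), uninterpolated_span), spacing)
//
// `$lt:lifetime` reaches flattening as
//     TokenKind::InterpolatedLifetime(name, uninterpolated_span)
// and becomes a lifetime token wrapped in an invisible-delimited group:
//     TokenTree::Delimited(
//         DelimSpan::from_single(token.span),
//         Delimiter::Invisible(InvisibleSource::FlattenToken),
//         TokenStream::token_alone(token::Lifetime(name), uninterpolated_span),
//     )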
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index 8658cea137a7d..f793cde77f0b8 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -267,14 +267,13 @@ impl<'a> StripUnconfigured<'a> { } AttrTokenTree::Delimited(sp, delim, mut inner) => { inner = self.configure_tokens(&inner); - Some(AttrTokenTree::Delimited(sp, delim, inner)) - .into_iter() + Some(AttrTokenTree::Delimited(sp, delim, inner)).into_iter() } - AttrTokenTree::Token(ref token, _) if let TokenKind::Interpolated(nt) = &token.kind => { - panic!( - "Nonterminal should have been flattened at {:?}: {:?}", - token.span, nt - ); + AttrTokenTree::Token(Token { + kind: TokenKind::InterpolatedIdent(..) | TokenKind::InterpolatedLifetime(..), + .. + }, _) => { + panic!("Nonterminal should have been flattened: {:?}", tree); } AttrTokenTree::Token(token, spacing) => { Some(AttrTokenTree::Token(token, spacing)).into_iter() diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs index e060375646c2d..233803c5155e6 100644 --- a/compiler/rustc_expand/src/mbe/diagnostics.rs +++ b/compiler/rustc_expand/src/mbe/diagnostics.rs @@ -4,7 +4,7 @@ use crate::mbe::{ macro_parser::{MatcherLoc, NamedParseResult, ParseResult::*, TtParser}, macro_rules::{try_match_macro, Tracker}, }; -use rustc_ast::token::{self, Token, TokenKind}; +use rustc_ast::token::{self, Token}; use rustc_ast::tokenstream::TokenStream; use rustc_ast_pretty::pprust; use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, DiagnosticMessage}; @@ -63,18 +63,6 @@ pub(super) fn failed_to_match_macro<'cx>( err.note(format!("while trying to match {remaining_matcher}")); } - if let MatcherLoc::Token { token: expected_token } = &remaining_matcher - && (matches!(expected_token.kind, TokenKind::Interpolated(_)) - || matches!(token.kind, TokenKind::Interpolated(_))) - { - err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens"); - err.note("see for more information"); - - if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) { - err.help("try using `:tt` instead in the macro definition"); - } - } - // Check whether there's a missing comma in this macro call, like `println!("{}" a);` if let Some((arg, comma_span)) = arg.add_comma() { for lhs in lhses { diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index 3d3bb27ce51ea..5318e1349a802 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -236,6 +236,7 @@ pub(super) fn transcribe<'a>( // without wrapping them into groups. 
tt.clone() } + // njn: remove all the `ref`s MatchedSingle(ParseNtResult::Item(ref item)) => { mk_delimited(NonterminalKind::Item, TokenStream::from_ast(item)) } @@ -264,6 +265,16 @@ pub(super) fn transcribe<'a>( MatchedSingle(ParseNtResult::Literal(ref expr)) => { mk_delimited(NonterminalKind::Literal, TokenStream::from_ast(expr)) } + MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => { + marker.visit_span(&mut sp); + let kind = token::InterpolatedIdent(ident.name, *is_raw, ident.span); + TokenTree::token_alone(kind, sp) + } + MatchedSingle(ParseNtResult::Lifetime(ref ident)) => { + marker.visit_span(&mut sp); + let kind = token::InterpolatedLifetime(ident.name, ident.span); + TokenTree::token_alone(kind, sp) + } MatchedSingle(ParseNtResult::Ty(ref ty)) => { mk_delimited(NonterminalKind::Ty, TokenStream::from_ast(ty)) } @@ -276,12 +287,6 @@ pub(super) fn transcribe<'a>( MatchedSingle(ParseNtResult::Vis(ref vis)) => { mk_delimited(NonterminalKind::Vis, TokenStream::from_ast(vis)) } - MatchedSingle(ParseNtResult::Nt(nt)) => { - // `Interpolated` is currently used to maintain parsing priorities for - // these cases, but will eventually be removed. - marker.visit_span(&mut sp); - TokenTree::token_alone(token::Interpolated(nt.clone()), sp) - } MatchedSeq(..) => { // We were unable to descend far enough. This is an error. return Err(cx.create_err(VarStillRepeating { span: sp, ident })); diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 32fed69fe0732..e2d2b0455f848 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -202,6 +202,10 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec op("'"), Ident(sym, is_raw) => trees.push(TokenTree::Ident(Ident { sym, is_raw, span })), + InterpolatedIdent(sym, is_raw, uninterpolated_span) => { + trees.push(TokenTree::Ident(Ident { sym, is_raw, span: uninterpolated_span })) + } + Lifetime(name) => { let ident = symbol::Ident::new(name, span).without_first_quote(); trees.extend([ @@ -209,6 +213,16 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec { + let stream = + TokenStream::token_alone(token::Lifetime(name), uninterpolated_span); + trees.push(TokenTree::Group(Group { + delimiter: pm::Delimiter::None, + stream: Some(stream), + span: DelimSpan::from_single(span), + })) + } + Literal(token::Lit { kind, symbol, suffix }) => { trees.push(TokenTree::Literal(self::Literal { kind: FromInternal::from_internal(kind), @@ -217,6 +231,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec { let mut escaped = String::new(); for ch in data.as_str().chars() { @@ -241,19 +256,6 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec { - trees.push(TokenTree::Ident(Ident { sym: ident.name, is_raw, span: ident.span })) - } - - Interpolated(nt) => { - let stream = TokenStream::from_nonterminal_ast(&nt); - trees.push(TokenTree::Group(Group { - delimiter: pm::Delimiter::None, - stream: Some(stream), - span: DelimSpan::from_single(span), - })) - } - OpenDelim(..) | CloseDelim(..) 
=> unreachable!(), Eof => unreachable!(), } diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index 5d6c574baa612..b5d304751f5ba 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -460,6 +460,6 @@ mod size_asserts { use rustc_data_structures::static_assert_size; // tidy-alphabetical-start static_assert_size!(AttrWrapper, 16); - static_assert_size!(LazyAttrTokenStreamImpl, 104); + static_assert_size!(LazyAttrTokenStreamImpl, 96); // tidy-alphabetical-end } diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index a2b412662b5dc..c419526c586e1 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -711,8 +711,13 @@ impl<'a> Parser<'a> { /// token. fn interpolated_or_expr_span(&self, expr: &Expr) -> Span { match self.prev_token.kind { - TokenKind::Interpolated(..) => self.prev_token.span, + TokenKind::InterpolatedIdent(..) | TokenKind::InterpolatedLifetime(..) => { + // njn: backwards? + // `expr.span` is the interpolated span, which is what we want. + expr.span + } TokenKind::CloseDelim(Delimiter::Invisible(_)) => { + // njn: backwards? // `expr.span` is the interpolated span, because invisible open // and close delims both get marked with the same span, one // that covers the entire thing between them. (See @@ -1150,7 +1155,7 @@ impl<'a> Parser<'a> { let base1 = self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1)); let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span); - self.bump_with((next_token2, self.token_spacing)); // `.` + self.bump_with(2, (next_token2, self.token_spacing)); // `.` self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None) } DestructuredFloat::Error => base, @@ -1178,7 +1183,7 @@ impl<'a> Parser<'a> { // after the float-like token, and therefore we have to make // the other parts of the parser think that there is a dot literal. self.token = Token::new(token::Ident(sym, false), sym_span); - self.bump_with((Token::new(token::Dot, dot_span), self.token_spacing)); + self.bump_with(3, (Token::new(token::Dot, dot_span), self.token_spacing)); thin_vec![Ident::new(sym, sym_span)] } // 1.2 | 1.2e3 @@ -1202,7 +1207,7 @@ impl<'a> Parser<'a> { next_token: Option<(Token, Spacing)>, ) -> P { match next_token { - Some(next_token) => self.bump_with(next_token), + Some(next_token) => self.bump_with(4, next_token), None => self.bump(), } let span = self.prev_token.span; @@ -1452,7 +1457,7 @@ impl<'a> Parser<'a> { self.parse_expr_let() } else if self.eat_keyword(kw::Underscore) { Ok(self.mk_expr(self.prev_token.span, ExprKind::Underscore)) - } else if self.token.uninterpolated_span().at_least_rust_2018() { + } else if self.uninterpolated_token_span().at_least_rust_2018() { // `Span:.at_least_rust_2018()` is somewhat expensive; don't get it repeatedly. 
if self.check_keyword(kw::Async) { if self.is_async_block() { @@ -1968,6 +1973,7 @@ impl<'a> Parser<'a> { return Err(err); } + // njn: remove in NtExpr/NtLiteral commit // if let token::Interpolated(nt) = &self.token.kind // && let token::NtExpr(e) | token::NtLiteral(e) = &**nt // && matches!(e.kind, ExprKind::Err) diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index c88fc9cba3621..11327722075e7 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -18,9 +18,7 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; pub use path::PathStyle; use rustc_ast::ptr::P; -use rustc_ast::token::{ - self, Delimiter, InvisibleSource, Nonterminal, NonterminalKind, Token, TokenKind, -}; +use rustc_ast::token::{self, Delimiter, InvisibleSource, NonterminalKind, Token, TokenKind}; use rustc_ast::tokenstream::{AttributesData, DelimSpan, Spacing}; use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor}; use rustc_ast::util::case::Case; @@ -31,7 +29,6 @@ use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, Mutability, StrLit} use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind}; use rustc_ast_pretty::pprust; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::sync::Lrc; use rustc_errors::PResult; use rustc_errors::{ Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan, @@ -769,7 +766,7 @@ impl<'a> Parser<'a> { self.break_last_token = true; // Use the spacing of the glued token as the spacing // of the unglued second token. - self.bump_with((Token::new(second, second_span), self.token_spacing)); + self.bump_with(6, (Token::new(second, second_span), self.token_spacing)); true } _ => { @@ -1088,15 +1085,16 @@ impl<'a> Parser<'a> { } /// Advance the parser by one token using provided token as the next one. - fn bump_with(&mut self, next: (Token, Spacing)) { - self.inlined_bump_with(next) + fn bump_with(&mut self, x: u32, next: (Token, Spacing)) { + self.inlined_bump_with(x, next) } /// This always-inlined version should only be used on hot code paths. #[inline(always)] - fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) { + fn inlined_bump_with(&mut self, _x: u32, (next_token, next_spacing): (Token, Spacing)) { // Update the current and previous tokens. self.prev_token = mem::replace(&mut self.token, next_token); + //eprintln!("bump `{:?}`", self.token); self.token_spacing = next_spacing; // Diagnostics. @@ -1117,12 +1115,13 @@ impl<'a> Parser<'a> { // Tweak the location for better diagnostics, but keep syntactic context intact. let fallback_span = self.token.span; next.0.span = fallback_span.with_ctxt(next.0.span.ctxt()); + //eprintln!("fallback {:?}", next.0.span); } debug_assert!(!matches!( next.0.kind, token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip() )); - self.inlined_bump_with(next) + self.inlined_bump_with(1, next) } /// Look-ahead `dist` tokens of `self.token` and get access to that token there. @@ -1558,14 +1557,12 @@ impl<'a> Parser<'a> { // njn: comment // njn: rename? pub fn uninterpolated_token_span(&self) -> Span { - match &self.token.kind { - token::Interpolated(nt) => nt.span(), - // njn: this pretty much assumes that it'll be a single token - // between the invisible delims. True for ident,lifetime, most - // literals, not true for `-1`. 
Could try to be more precise, match - // on the NonterminalKind as well + match self.token.kind { + token::InterpolatedIdent(_, _, uninterpolated_span) + | token::InterpolatedLifetime(_, uninterpolated_span) => uninterpolated_span, token::OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(_))) => { - self.look_ahead(1, |t| t.span) + // njn: explain + self.token_cursor.tree_cursor.span() } _ => self.token.span, } @@ -1631,11 +1628,10 @@ pub enum ParseNtResult { PatWithOr(P), Expr(P), // njn: combine with Literal? Literal(P), + Ident(Ident, /* is_raw */ bool), + Lifetime(Ident), Ty(P), Meta(P), Path(P), Vis(P), - - /// This case will eventually be removed, along with `Token::Interpolate`. - Nt(Lrc), } diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index d2d629a0c972d..656cc866da747 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -1,8 +1,5 @@ use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, InvisibleSource, Nonterminal::*, NonterminalKind, Token}; -use rustc_ast::HasTokens; -use rustc_ast_pretty::pprust; -use rustc_data_structures::sync::Lrc; +use rustc_ast::token::{self, Delimiter, InvisibleSource, NonterminalKind, Token}; use rustc_errors::IntoDiagnostic; use rustc_errors::PResult; use rustc_span::symbol::{kw, Ident}; @@ -28,26 +25,17 @@ impl<'a> Parser<'a> { | PatWithOr | Expr | Ty - | Ident | Literal // `true`, `false` | Meta | Path => true, Item | Block - | Vis - | Lifetime => false, + | Vis => false, - TT => unreachable!(), - } - } - - /// Old variant of `may_be_ident`, being phased out. - fn nt_may_be_ident(nt: &token::Nonterminal) -> bool { - match nt { - NtIdent(..) => true, - - NtLifetime(_) => false, + Ident + | Lifetime + | TT => unreachable!(), } } @@ -66,19 +54,15 @@ impl<'a> Parser<'a> { // The follow-set of :vis + "priv" keyword + interpolated/metavar-expansion token::Comma | token::Ident(..) - | token::Interpolated(..) + | token::InterpolatedIdent(..) + | token::InterpolatedLifetime(..) | token::OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(_))) => true, _ => token.can_begin_type(), }, NonterminalKind::Block => match &token.kind { token::OpenDelim(Delimiter::Brace) => true, - token::Interpolated(nt) => match **nt { - NtLifetime(_) => true, - NtIdent(..) => false, - }, token::OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(k))) => match k { NonterminalKind::Block - | NonterminalKind::Lifetime | NonterminalKind::Stmt | NonterminalKind::Expr | NonterminalKind::Literal => true, @@ -86,24 +70,26 @@ impl<'a> Parser<'a> { | NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr | NonterminalKind::Ty - | NonterminalKind::Ident | NonterminalKind::Meta | NonterminalKind::Path | NonterminalKind::Vis => false, - NonterminalKind::TT => unreachable!(), + NonterminalKind::Ident | NonterminalKind::Lifetime | NonterminalKind::TT => { + unreachable!() + } }, + token::InterpolatedLifetime(..) => true, _ => false, }, NonterminalKind::Path | NonterminalKind::Meta => match &token.kind { - token::ModSep | token::Ident(..) => true, - token::Interpolated(nt) => nt_may_be_ident(nt), + token::ModSep | token::Ident(..) | token::InterpolatedIdent(..) => true, token::OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(kind))) => { may_be_ident(*kind) } _ => false, }, NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => match &token.kind { - token::Ident(..) 
| // box, ref, mut, and other identifiers (can stricten) + // box, ref, mut, and other identifiers (can stricten) + token::Ident(..) | token::InterpolatedIdent(..) | token::OpenDelim(Delimiter::Parenthesis) | // tuple pattern token::OpenDelim(Delimiter::Bracket) | // slice pattern token::BinOp(token::And) | // reference @@ -117,20 +103,13 @@ impl<'a> Parser<'a> { token::BinOp(token::Shl) => true, // path (double UFCS) // leading vert `|` or-pattern token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr), - token::Interpolated(nt) => nt_may_be_ident(nt), token::OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(kind))) => { may_be_ident(*kind) } _ => false, }, NonterminalKind::Lifetime => match &token.kind { - token::Lifetime(_) => true, - token::Interpolated(nt) => { - matches!(**nt, NtLifetime(_)) - } - token::OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar( - NonterminalKind::Lifetime, - ))) => true, + token::Lifetime(_) | token::InterpolatedLifetime(..) => true, _ => false, }, NonterminalKind::TT | NonterminalKind::Item | NonterminalKind::Stmt => { @@ -146,103 +125,80 @@ impl<'a> Parser<'a> { // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro, // which requires having captured tokens available. Since we cannot determine // in advance whether or not a proc-macro will be (transitively) invoked, - // we always capture tokens for any `Nonterminal` which needs them. - let mut nt = match kind { - // Note that TT is treated differently to all the others. - NonterminalKind::TT => return Ok(ParseNtResult::Tt(self.parse_token_tree())), + // we always capture tokens for any nonterminal which needs them. + match kind { + NonterminalKind::TT => Ok(ParseNtResult::Tt(self.parse_token_tree())), NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? { - Some(item) => return Ok(ParseNtResult::Item(item)), - None => { - return Err(UnexpectedNonterminal::Item(self.token.span) - .into_diagnostic(&self.sess.span_diagnostic)); - } + Some(item) => Ok(ParseNtResult::Item(item)), + None => Err(UnexpectedNonterminal::Item(self.token.span) + .into_diagnostic(&self.sess.span_diagnostic)), }, NonterminalKind::Block => { // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`), // the ':block' matcher does not support them - return Ok(ParseNtResult::Block( - self.collect_tokens_no_attrs(|this| this.parse_block())?) - ) + Ok(ParseNtResult::Block(self.collect_tokens_no_attrs(|this| this.parse_block())?)) } NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? 
{ - Some(stmt) => return Ok(ParseNtResult::Stmt(P(stmt))), - None => { - return Err(UnexpectedNonterminal::Statement(self.token.span) - .into_diagnostic(&self.sess.span_diagnostic)); - } + Some(stmt) => Ok(ParseNtResult::Stmt(P(stmt))), + None => Err(UnexpectedNonterminal::Statement(self.token.span) + .into_diagnostic(&self.sess.span_diagnostic)), }, - NonterminalKind::PatParam { inferred } => { - return Ok(ParseNtResult::PatParam(self.collect_tokens_no_attrs(|this| - this.parse_pat_no_top_alt(None, None) - )?, inferred)) - } + NonterminalKind::PatParam { inferred } => Ok(ParseNtResult::PatParam( + self.collect_tokens_no_attrs(|this| this.parse_pat_no_top_alt(None, None))?, + inferred, + )), NonterminalKind::PatWithOr => { - return Ok(ParseNtResult::PatWithOr(self.collect_tokens_no_attrs(|this| + Ok(ParseNtResult::PatWithOr(self.collect_tokens_no_attrs(|this| { this.parse_pat_allow_top_alt( None, RecoverComma::No, RecoverColon::No, CommaRecoveryMode::EitherTupleOrPipe, ) - )?)) - } - NonterminalKind::Expr => { - return Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?)); + })?)) } + NonterminalKind::Expr => Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?)), NonterminalKind::Literal => { // The `:literal` matcher does not support attributes - return Ok(ParseNtResult::Literal( + Ok(ParseNtResult::Literal( self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?, )) } - - NonterminalKind::Ty => return Ok(ParseNtResult::Ty( - self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())? + NonterminalKind::Ty => Ok(ParseNtResult::Ty( + self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?, )), - - // this could be handled like a token, since it is one - NonterminalKind::Ident - if let Some((ident, is_raw)) = get_macro_ident(&self.token) => - { - self.bump(); - NtIdent(ident, is_raw) - } NonterminalKind::Ident => { - return Err(UnexpectedNonterminal::Ident { - span: self.token.span, - token: self.token.clone(), - }.into_diagnostic(&self.sess.span_diagnostic)); + if let Some((ident, is_raw)) = get_macro_ident(&self.token) { + self.bump(); + Ok(ParseNtResult::Ident(ident, is_raw)) + } else { + Err(UnexpectedNonterminal::Ident { + span: self.token.span, + token: self.token.clone(), + } + .into_diagnostic(&self.sess.span_diagnostic)) + } + } + NonterminalKind::Path => Ok(ParseNtResult::Path(P( + self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))? 
+ ))), + NonterminalKind::Meta => Ok(ParseNtResult::Meta(P(self.parse_attr_item(true)?))), + NonterminalKind::Vis => { + Ok(ParseNtResult::Vis(P(self + .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?))) } - NonterminalKind::Path => return Ok(ParseNtResult::Path( - P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?), - )), - NonterminalKind::Meta => return Ok(ParseNtResult::Meta(P(self.parse_attr_item(true)?))), - NonterminalKind::Vis => return Ok(ParseNtResult::Vis( - P(self.collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?), - )), NonterminalKind::Lifetime => { if self.check_lifetime() { - NtLifetime(self.expect_lifetime().ident) + Ok(ParseNtResult::Lifetime(self.expect_lifetime().ident)) } else { - return Err(UnexpectedNonterminal::Lifetime { + Err(UnexpectedNonterminal::Lifetime { span: self.token.span, token: self.token.clone(), - }.into_diagnostic(&self.sess.span_diagnostic)); + } + .into_diagnostic(&self.sess.span_diagnostic)) } } - }; - - // If tokens are supported at all, they should be collected. - if matches!(nt.tokens_mut(), Some(None)) { - panic!( - "Missing tokens for nt {:?} at {:?}: {:?}", - nt, - nt.span(), - pprust::nonterminal_to_string(&nt) - ); } - - Ok(ParseNtResult::Nt(Lrc::new(nt))) } } diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index e5710c2ceefab..a91fd2a7d4503 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -495,7 +495,7 @@ impl<'a> Parser<'a> { mutbl = Mutability::Mut; let (dyn_tok, dyn_tok_sp) = (self.token.clone(), self.token_spacing); self.bump(); - self.bump_with((dyn_tok, dyn_tok_sp)); + self.bump_with(5, (dyn_tok, dyn_tok_sp)); } let ty = self.parse_ty_no_plus()?; Ok(TyKind::Ref(opt_lifetime, MutTy { ty, mutbl }))
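[Editor's note: the example below is illustrative and not part of the patch. It shows the kind of `macro_rules!` forwarding that used to carry the note removed from `mbe/diagnostics.rs` above ("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens"). The macro names are made up, and only the removal of that note is asserted here; whether a given forwarded match now succeeds depends on the invisible-delimiter handling elsewhere in this series.]
// Re-matching a forwarded `$e:expr` capture against the literal `""` in
// `is_empty_str!` is the situation that previously attached the
// "cannot be compared to other tokens" note to the match failure. With
// `Token::Interpolated` gone, captures are re-tokenized behind invisible
// delimiters instead, and the dedicated note no longer exists.
macro_rules! is_empty_str {
    ("") => { true };
}
macro_rules! forward {
    ($e:expr) => { is_empty_str!($e) };
}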