Rollup merge of #96433 - petrochenkov:delim, r=nnethercote

rustc_ast: Harmonize delimiter naming with `proc_macro::Delimiter`

The compiler cannot reuse `proc_macro::Delimiter` directly because it needs extra impls, but it can at least use the same naming.

After this PR the only difference between these two enums is that `proc_macro::Delimiter::None` is turned into `token::Delimiter::Invisible`.
It's my mistake that the invisible delimiter is called `None` on stable. During stabilization I audited the naming and wrote the docs, but missed that the `None` naming gives a wrong and confusing impression of what this thing is.
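
A minimal, self-contained sketch of the resulting correspondence (stand-in enums for illustration only; the real types are `rustc_ast::token::Delimiter` and `proc_macro::Delimiter`, and the actual conversion is in the `proc_macro_server` changes below):

```rust
// Stand-in enums mirroring the two types; only the naming matters here.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum AstDelimiter {
    Parenthesis,
    Brace,
    Bracket,
    Invisible, // previously `DelimToken::NoDelim`
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ProcMacroDelimiter {
    Parenthesis,
    Brace,
    Bracket,
    None, // stable name; renamed to `Invisible` on the compiler side
}

// After this PR the mapping is an identity except for the last variant.
fn to_proc_macro(delim: AstDelimiter) -> ProcMacroDelimiter {
    match delim {
        AstDelimiter::Parenthesis => ProcMacroDelimiter::Parenthesis,
        AstDelimiter::Brace => ProcMacroDelimiter::Brace,
        AstDelimiter::Bracket => ProcMacroDelimiter::Bracket,
        AstDelimiter::Invisible => ProcMacroDelimiter::None,
    }
}

fn main() {
    assert_eq!(to_proc_macro(AstDelimiter::Invisible), ProcMacroDelimiter::None);
}
```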

cc https://github.com/rust-lang/rust/pull/96421
r? ``@nnethercote``
Commit 0cbf3b2b30 by Dylan DPC, 2022-04-28 20:13:02 +02:00 (committed by GitHub)
41 changed files with 433 additions and 426 deletions

======================== changed file ========================

@@ -23,7 +23,7 @@
 pub use UnsafeSource::*;
 use crate::ptr::P;
-use crate::token::{self, CommentKind, DelimToken, Token};
+use crate::token::{self, CommentKind, Delimiter, Token};
 use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream, TokenTree};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
@@ -1542,7 +1542,7 @@ pub enum MacArgs {
 }
 impl MacArgs {
-    pub fn delim(&self) -> Option<DelimToken> {
+    pub fn delim(&self) -> Option<Delimiter> {
         match self {
             MacArgs::Delimited(_, delim, _) => Some(delim.to_token()),
             MacArgs::Empty | MacArgs::Eq(..) => None,
@@ -1582,20 +1582,20 @@ pub enum MacDelimiter {
 }
 impl MacDelimiter {
-    pub fn to_token(self) -> DelimToken {
+    pub fn to_token(self) -> Delimiter {
         match self {
-            MacDelimiter::Parenthesis => DelimToken::Paren,
-            MacDelimiter::Bracket => DelimToken::Bracket,
-            MacDelimiter::Brace => DelimToken::Brace,
+            MacDelimiter::Parenthesis => Delimiter::Parenthesis,
+            MacDelimiter::Bracket => Delimiter::Bracket,
+            MacDelimiter::Brace => Delimiter::Brace,
         }
     }
-    pub fn from_token(delim: DelimToken) -> Option<MacDelimiter> {
+    pub fn from_token(delim: Delimiter) -> Option<MacDelimiter> {
         match delim {
-            token::Paren => Some(MacDelimiter::Parenthesis),
-            token::Bracket => Some(MacDelimiter::Bracket),
-            token::Brace => Some(MacDelimiter::Brace),
-            token::NoDelim => None,
+            Delimiter::Parenthesis => Some(MacDelimiter::Parenthesis),
+            Delimiter::Bracket => Some(MacDelimiter::Bracket),
+            Delimiter::Brace => Some(MacDelimiter::Brace),
+            Delimiter::Invisible => None,
         }
     }
 }

======================== changed file ========================

@@ -5,7 +5,7 @@
 use crate::ast::{Lit, LitKind};
 use crate::ast::{MacArgs, MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
 use crate::ast::{Path, PathSegment};
-use crate::token::{self, CommentKind, Token};
+use crate::token::{self, CommentKind, Delimiter, Token};
 use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
 use crate::tokenstream::{DelimSpan, Spacing, TokenTree, TreeAndSpacing};
 use crate::tokenstream::{LazyTokenStream, TokenStream};
@@ -513,7 +513,7 @@ fn token_trees_and_spacings(&self, span: Span) -> Vec<TreeAndSpacing> {
                 vec![
                     TokenTree::Delimited(
                         DelimSpan::from_single(span),
-                        token::Paren,
+                        Delimiter::Parenthesis,
                         TokenStream::new(tokens),
                     )
                     .into(),
@@ -540,7 +540,7 @@ fn name_value_from_tokens(
         tokens: &mut impl Iterator<Item = TokenTree>,
     ) -> Option<MetaItemKind> {
         match tokens.next() {
-            Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
+            Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
                 MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
             }
             Some(TokenTree::Token(token)) => {
@@ -565,7 +565,7 @@ fn from_tokens(
         tokens: &mut iter::Peekable<impl Iterator<Item = TokenTree>>,
     ) -> Option<MetaItemKind> {
         match tokens.peek() {
-            Some(TokenTree::Delimited(_, token::Paren, inner_tokens)) => {
+            Some(TokenTree::Delimited(_, Delimiter::Parenthesis, inner_tokens)) => {
                 let inner_tokens = inner_tokens.clone();
                 tokens.next();
                 MetaItemKind::list_from_tokens(inner_tokens)
@@ -606,7 +606,7 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
                 tokens.next();
                 return Some(NestedMetaItem::Literal(lit));
             }
-            Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
+            Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
                 let inner_tokens = inner_tokens.clone();
                 tokens.next();
                 return NestedMetaItem::from_tokens(&mut inner_tokens.into_trees().peekable());

======================== changed file ========================

@@ -1,5 +1,4 @@
 pub use BinOpToken::*;
-pub use DelimToken::*;
 pub use LitKind::*;
 pub use Nonterminal::*;
 pub use TokenKind::*;
@@ -37,18 +36,26 @@ pub enum BinOpToken {
     Shr,
 }
-/// A delimiter token.
-#[derive(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Debug, Copy)]
-#[derive(HashStable_Generic)]
-pub enum DelimToken {
-    /// A round parenthesis (i.e., `(` or `)`).
-    Paren,
-    /// A square bracket (i.e., `[` or `]`).
-    Bracket,
-    /// A curly brace (i.e., `{` or `}`).
-    Brace,
-    /// An empty delimiter.
-    NoDelim,
+/// Describes how a sequence of token trees is delimited.
+/// Cannot use `proc_macro::Delimiter` directly because this
+/// structure should implement some additional traits.
+/// The `None` variant is also renamed to `Invisible` to be
+/// less confusing and better convey the semantics.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Encodable, Decodable, Hash, HashStable_Generic)]
+pub enum Delimiter {
+    /// `( ... )`
+    Parenthesis,
+    /// `{ ... }`
+    Brace,
+    /// `[ ... ]`
+    Bracket,
+    /// `Ø ... Ø`
+    /// An invisible delimiter, that may, for example, appear around tokens coming from a
+    /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
+    /// `$var * 3` where `$var` is `1 + 2`.
+    /// Invisible delimiters might not survive roundtrip of a token stream through a string.
+    Invisible,
 }
 #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
@@ -212,9 +219,9 @@ pub enum TokenKind {
     /// Used by proc macros for representing lifetimes, not generated by lexer right now.
     SingleQuote,
     /// An opening delimiter (e.g., `{`).
-    OpenDelim(DelimToken),
+    OpenDelim(Delimiter),
     /// A closing delimiter (e.g., `}`).
-    CloseDelim(DelimToken),
+    CloseDelim(Delimiter),
     /* Literals */
     Literal(Lit),
@@ -387,8 +394,8 @@ pub fn can_begin_type(&self) -> bool {
         match self.uninterpolate().kind {
             Ident(name, is_raw) =>
                 ident_can_begin_type(name, self.span, is_raw), // type name or keyword
-            OpenDelim(Paren) | // tuple
-            OpenDelim(Bracket) | // array
+            OpenDelim(Delimiter::Parenthesis) | // tuple
+            OpenDelim(Delimiter::Bracket) | // array
             Not | // never
            BinOp(Star) | // raw pointer
            BinOp(And) | // reference
@@ -405,7 +412,7 @@ pub fn can_begin_type(&self) -> bool {
     /// Returns `true` if the token can appear at the start of a const param.
     pub fn can_begin_const_arg(&self) -> bool {
         match self.kind {
-            OpenDelim(Brace) => true,
+            OpenDelim(Delimiter::Brace) => true,
             Interpolated(ref nt) => matches!(**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
             _ => self.can_begin_literal_maybe_minus(),
         }
@@ -417,7 +424,7 @@ pub fn can_begin_bound(&self) -> bool {
             || self.is_lifetime()
             || self.is_keyword(kw::For)
             || self == &Question
-            || self == &OpenDelim(Paren)
+            || self == &OpenDelim(Delimiter::Parenthesis)
     }
     /// Returns `true` if the token is any literal.

======================== changed file ========================

@@ -13,7 +13,7 @@
 //! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
 //! ownership of the original.
-use crate::token::{self, DelimToken, Token, TokenKind};
+use crate::token::{self, Delimiter, Token, TokenKind};
 use crate::AttrVec;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
@@ -42,7 +42,7 @@ pub enum TokenTree {
     /// A single token.
     Token(Token),
     /// A delimited sequence of token trees.
-    Delimited(DelimSpan, DelimToken, TokenStream),
+    Delimited(DelimSpan, Delimiter, TokenStream),
 }
 #[derive(Copy, Clone)]
@@ -57,7 +57,7 @@ fn _dummy()
 where
     Token: Send + Sync,
     DelimSpan: Send + Sync,
-    DelimToken: Send + Sync,
+    Delimiter: Send + Sync,
     TokenStream: Send + Sync,
 {
 }
@@ -175,7 +175,7 @@ fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
 #[derive(Clone, Debug, Encodable, Decodable)]
 pub enum AttrAnnotatedTokenTree {
     Token(Token),
-    Delimited(DelimSpan, DelimToken, AttrAnnotatedTokenStream),
+    Delimited(DelimSpan, Delimiter, AttrAnnotatedTokenStream),
     /// Stores the attributes for an attribute target,
     /// along with the tokens for that attribute target.
     /// See `AttributesData` for more information

======================== changed file ========================

@@ -38,7 +38,7 @@
 #![recursion_limit = "256"]
 #![allow(rustc::potential_query_instability)]
-use rustc_ast::token::{self, Token};
+use rustc_ast::token::{Delimiter, Token};
 use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream, TokenTree};
 use rustc_ast::visit;
 use rustc_ast::{self as ast, *};
@@ -886,7 +886,7 @@ fn unwrap_single_token(sess: &Session, tokens: TokenStream, span: Span) -> Token
     match tokens.into_trees().next() {
         Some(TokenTree::Token(token)) => token,
         Some(TokenTree::Delimited(_, delim, tokens)) => {
-            if delim != token::NoDelim {
+            if delim != Delimiter::Invisible {
                 sess.diagnostic().delay_span_bug(
                     span,
                     "unexpected delimiter in key-value attribute's value",

======================== changed file ========================

@@ -6,7 +6,7 @@
 use crate::pp::{self, Breaks};
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, BinOpToken, CommentKind, DelimToken, Nonterminal, Token, TokenKind};
+use rustc_ast::token::{self, BinOpToken, CommentKind, Delimiter, Nonterminal, Token, TokenKind};
 use rustc_ast::tokenstream::{TokenStream, TokenTree};
 use rustc_ast::util::classify;
 use rustc_ast::util::comments::{gather_comments, Comment, CommentStyle};
@@ -155,10 +155,10 @@ fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool {
     }
     match tt {
         TokenTree::Token(token) => !matches!(token.kind, token::Comma | token::Not | token::Dot),
-        TokenTree::Delimited(_, DelimToken::Paren, _) => {
+        TokenTree::Delimited(_, Delimiter::Parenthesis, _) => {
             !matches!(prev, TokenTree::Token(Token { kind: token::Ident(..), .. }))
         }
-        TokenTree::Delimited(_, DelimToken::Bracket, _) => {
+        TokenTree::Delimited(_, Delimiter::Bracket, _) => {
             !matches!(prev, TokenTree::Token(Token { kind: token::Pound, .. }))
         }
         TokenTree::Delimited(..) => true,
@@ -556,12 +556,12 @@ fn print_mac_common(
         header: Option<MacHeader<'_>>,
         has_bang: bool,
         ident: Option<Ident>,
-        delim: Option<DelimToken>,
+        delim: Option<Delimiter>,
         tts: &TokenStream,
         convert_dollar_crate: bool,
         span: Span,
     ) {
-        if delim == Some(DelimToken::Brace) {
+        if delim == Some(Delimiter::Brace) {
            self.cbox(INDENT_UNIT);
        }
        match header {
@@ -577,7 +577,7 @@ fn print_mac_common(
            self.print_ident(ident);
        }
        match delim {
-            Some(DelimToken::Brace) => {
+            Some(Delimiter::Brace) => {
                if header.is_some() || has_bang || ident.is_some() {
                    self.nbsp();
                }
@@ -758,13 +758,15 @@ fn token_kind_to_string_ext(
        token::RArrow => "->".into(),
        token::LArrow => "<-".into(),
        token::FatArrow => "=>".into(),
-        token::OpenDelim(token::Paren) => "(".into(),
-        token::CloseDelim(token::Paren) => ")".into(),
-        token::OpenDelim(token::Bracket) => "[".into(),
-        token::CloseDelim(token::Bracket) => "]".into(),
-        token::OpenDelim(token::Brace) => "{".into(),
-        token::CloseDelim(token::Brace) => "}".into(),
-        token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".into(),
+        token::OpenDelim(Delimiter::Parenthesis) => "(".into(),
+        token::CloseDelim(Delimiter::Parenthesis) => ")".into(),
+        token::OpenDelim(Delimiter::Bracket) => "[".into(),
+        token::CloseDelim(Delimiter::Bracket) => "]".into(),
+        token::OpenDelim(Delimiter::Brace) => "{".into(),
+        token::CloseDelim(Delimiter::Brace) => "}".into(),
+        token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible) => {
+            "".into()
+        }
        token::Pound => "#".into(),
        token::Dollar => "$".into(),
        token::Question => "?".into(),

======================== changed file ========================

@@ -1,6 +1,6 @@
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
-use rustc_ast::token;
+use rustc_ast::token::{self, Delimiter};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_errors::{Applicability, PResult};
@@ -395,9 +395,9 @@ fn parse_options<'a>(
 ) -> PResult<'a, ()> {
     let span_start = p.prev_token.span;
-    p.expect(&token::OpenDelim(token::DelimToken::Paren))?;
-    while !p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
+    p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
+    while !p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
         if !is_global_asm && p.eat_keyword(sym::pure) {
             try_set_option(p, args, sym::pure, ast::InlineAsmOptions::PURE);
         } else if !is_global_asm && p.eat_keyword(sym::nomem) {
@@ -421,7 +421,7 @@ fn parse_options<'a>(
         }
         // Allow trailing commas
-        if p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
+        if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
             break;
         }
         p.expect(&token::Comma)?;
@@ -436,9 +436,9 @@ fn parse_options<'a>(
 fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a, ()> {
     let span_start = p.prev_token.span;
-    p.expect(&token::OpenDelim(token::DelimToken::Paren))?;
-    if p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
+    p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
+    if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
         let err = p.sess.span_diagnostic.struct_span_err(
             p.token.span,
             "at least one abi must be provided as an argument to `clobber_abi`",
@@ -454,7 +454,7 @@ fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a,
             }
             Err(opt_lit) => {
                 // If the non-string literal is a closing paren then it's the end of the list and is fine
-                if p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
+                if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
                     break;
                 }
                 let span = opt_lit.map_or(p.token.span, |lit| lit.span);
@@ -466,7 +466,7 @@ fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a,
         };
         // Allow trailing commas
-        if p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
+        if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
             break;
         }
         p.expect(&token::Comma)?;
@@ -501,7 +501,7 @@ fn parse_reg<'a>(
     p: &mut Parser<'a>,
     explicit_reg: &mut bool,
 ) -> PResult<'a, ast::InlineAsmRegOrRegClass> {
-    p.expect(&token::OpenDelim(token::DelimToken::Paren))?;
+    p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
     let result = match p.token.uninterpolate().kind {
         token::Ident(name, false) => ast::InlineAsmRegOrRegClass::RegClass(name),
         token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => {
@@ -515,7 +515,7 @@ fn parse_reg<'a>(
         }
     };
     p.bump();
-    p.expect(&token::CloseDelim(token::DelimToken::Paren))?;
+    p.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
     Ok(result)
 }

======================== changed file ========================

@@ -1,7 +1,7 @@
 //! Conditional compilation stripping.
 use rustc_ast::ptr::P;
-use rustc_ast::token::{DelimToken, Token, TokenKind};
+use rustc_ast::token::{Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
 use rustc_ast::tokenstream::{DelimSpan, Spacing};
 use rustc_ast::tokenstream::{LazyTokenStream, TokenTree};
@@ -418,7 +418,7 @@ fn expand_cfg_attr_item(
         // in `#[attr]`, so just use the span of the `#` token.
         let bracket_group = AttrAnnotatedTokenTree::Delimited(
             DelimSpan::from_single(pound_span),
-            DelimToken::Bracket,
+            Delimiter::Bracket,
             item.tokens
                 .as_ref()
                 .unwrap_or_else(|| panic!("Missing tokens for {:?}", item))

======================== changed file ========================

@@ -8,7 +8,7 @@
 use rustc_ast as ast;
 use rustc_ast::mut_visit::*;
 use rustc_ast::ptr::P;
-use rustc_ast::token;
+use rustc_ast::token::{self, Delimiter};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::visit::{self, AssocCtxt, Visitor};
 use rustc_ast::{AssocItemKind, AstLike, AstLikeWrapper, AttrStyle, ExprKind, ForeignItemKind};
@@ -884,7 +884,7 @@ pub fn parse_ast_fragment<'a>(
             AstFragmentKind::Stmts => {
                 let mut stmts = SmallVec::new();
                 // Won't make progress on a `}`.
-                while this.token != token::Eof && this.token != token::CloseDelim(token::Brace) {
+                while this.token != token::Eof && this.token != token::CloseDelim(Delimiter::Brace) {
                     if let Some(stmt) = this.parse_full_stmt(AttemptLocalParseRecovery::Yes)? {
                         stmts.push(stmt);
                     }

======================== changed file ========================

@@ -11,16 +11,16 @@
 crate mod transcribe;
 use metavar_expr::MetaVarExpr;
-use rustc_ast::token::{self, NonterminalKind, Token, TokenKind};
+use rustc_ast::token::{Delimiter, NonterminalKind, Token, TokenKind};
 use rustc_ast::tokenstream::DelimSpan;
 use rustc_span::symbol::Ident;
 use rustc_span::Span;
-/// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`. The delimiters
-/// might be `NoDelim`, but they are not represented explicitly.
+/// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`.
+/// The delimiters are not represented explicitly in the `tts` vector.
 #[derive(PartialEq, Encodable, Decodable, Debug)]
 struct Delimited {
-    delim: token::DelimToken,
+    delim: Delimiter,
     /// FIXME: #67062 has details about why this is sub-optimal.
     tts: Vec<TokenTree>,
 }

======================== changed file ========================

@@ -106,7 +106,7 @@
 //! bound.
 use crate::mbe::{KleeneToken, TokenTree};
-use rustc_ast::token::{DelimToken, Token, TokenKind};
+use rustc_ast::token::{Delimiter, Token, TokenKind};
 use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::MultiSpan;
@@ -439,7 +439,7 @@ fn check_nested_occurrences(
         }
         (NestedMacroState::MacroRulesNotName, &TokenTree::Delimited(_, ref del))
         | (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
-            if del.delim == DelimToken::Brace =>
+            if del.delim == Delimiter::Brace =>
         {
             let macro_rules = state == NestedMacroState::MacroRulesNotName;
             state = NestedMacroState::Empty;
@@ -469,7 +469,7 @@ fn check_nested_occurrences(
             check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
         }
         (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
-            if del.delim == DelimToken::Paren =>
+            if del.delim == Delimiter::Parenthesis =>
         {
             state = NestedMacroState::MacroNameParen;
             nested_binders = Binders::default();
@@ -484,7 +484,7 @@ fn check_nested_occurrences(
             );
         }
         (NestedMacroState::MacroNameParen, &TokenTree::Delimited(_, ref del))
-            if del.delim == DelimToken::Brace =>
+            if del.delim == Delimiter::Brace =>
         {
             state = NestedMacroState::Empty;
             check_occurrences(

======================== changed file ========================

@@ -8,7 +8,7 @@
 use crate::mbe::transcribe::transcribe;
 use rustc_ast as ast;
-use rustc_ast::token::{self, NonterminalKind, Token, TokenKind, TokenKind::*};
+use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast_pretty::pprust;
@@ -1250,8 +1250,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
             ];
             match tok {
                 TokenTree::Token(token) => match token.kind {
-                    OpenDelim(token::DelimToken::Brace)
-                    | OpenDelim(token::DelimToken::Bracket)
+                    OpenDelim(Delimiter::Brace)
+                    | OpenDelim(Delimiter::Bracket)
                     | Comma
                     | FatArrow
                    | Colon

======================== changed file ========================

@@ -1,4 +1,4 @@
-use rustc_ast::token;
+use rustc_ast::token::{self, Delimiter};
 use rustc_ast::tokenstream::{Cursor, TokenStream, TokenTree};
 use rustc_ast::{LitIntType, LitKind};
 use rustc_ast_pretty::pprust;
@@ -35,7 +35,7 @@ impl MetaVarExpr {
     ) -> PResult<'sess, MetaVarExpr> {
         let mut tts = input.trees();
         let ident = parse_ident(&mut tts, sess, outer_span)?;
-        let Some(TokenTree::Delimited(_, token::Paren, args)) = tts.next() else {
+        let Some(TokenTree::Delimited(_, Delimiter::Parenthesis, args)) = tts.next() else {
             let msg = "meta-variable expression parameter must be wrapped in parentheses";
             return Err(sess.span_diagnostic.struct_span_err(ident.span, msg));
         };

======================== changed file ========================

@@ -1,7 +1,7 @@
 use crate::mbe::macro_parser::count_metavar_decls;
 use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, TokenTree};
-use rustc_ast::token::{self, Token};
+use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::{tokenstream, NodeId};
 use rustc_ast_pretty::pprust;
 use rustc_feature::Features;
@@ -147,11 +147,11 @@ fn parse_tree(
     match tree {
         // `tree` is a `$` token. Look at the next token in `trees`
         tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => {
-            // FIXME: Handle `None`-delimited groups in a more systematic way
+            // FIXME: Handle `Invisible`-delimited groups in a more systematic way
             // during parsing.
             let mut next = outer_trees.next();
             let mut trees: Box<dyn Iterator<Item = tokenstream::TokenTree>>;
-            if let Some(tokenstream::TokenTree::Delimited(_, token::NoDelim, tts)) = next {
+            if let Some(tokenstream::TokenTree::Delimited(_, Delimiter::Invisible, tts)) = next {
                 trees = Box::new(tts.into_trees());
                 next = trees.next();
             } else {
@@ -162,7 +162,7 @@ fn parse_tree(
                 // `tree` is followed by a delimited set of token trees.
                 Some(tokenstream::TokenTree::Delimited(delim_span, delim, tts)) => {
                     if parsing_patterns {
-                        if delim != token::Paren {
+                        if delim != Delimiter::Parenthesis {
                             span_dollar_dollar_or_metavar_in_the_lhs_err(
                                 sess,
                                 &Token { kind: token::OpenDelim(delim), span: delim_span.entire() },
@@ -170,7 +170,7 @@ fn parse_tree(
                        }
                    } else {
                        match delim {
-                            token::Brace => {
+                            Delimiter::Brace => {
                                // The delimiter is `{`. This indicates the beginning
                                // of a meta-variable expression (e.g. `${count(ident)}`).
                                // Try to parse the meta-variable expression.
@@ -191,7 +191,7 @@ fn parse_tree(
                                }
                            }
                        }
-                            token::Paren => {}
+                            Delimiter::Parenthesis => {}
                            _ => {
                                let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
                                let msg = format!("expected `(` or `{{`, found `{}`", tok);

======================== changed file ========================

@@ -2,7 +2,7 @@
 use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, MatchedTokenTree, NamedMatch};
 use crate::mbe::{self, MetaVarExpr};
 use rustc_ast::mut_visit::{self, MutVisitor};
-use rustc_ast::token::{self, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{pluralize, PResult};
@@ -27,23 +27,14 @@ fn visit_span(&mut self, span: &mut Span) {
 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
 enum Frame<'a> {
-    Delimited {
-        tts: &'a [mbe::TokenTree],
-        idx: usize,
-        delim_token: token::DelimToken,
-        span: DelimSpan,
-    },
-    Sequence {
-        tts: &'a [mbe::TokenTree],
-        idx: usize,
-        sep: Option<Token>,
-    },
+    Delimited { tts: &'a [mbe::TokenTree], idx: usize, delim: Delimiter, span: DelimSpan },
+    Sequence { tts: &'a [mbe::TokenTree], idx: usize, sep: Option<Token> },
 }
 impl<'a> Frame<'a> {
     /// Construct a new frame around the delimited set of tokens.
     fn new(src: &'a mbe::Delimited, span: DelimSpan) -> Frame<'a> {
-        Frame::Delimited { tts: &src.tts, idx: 0, delim_token: src.delim, span }
+        Frame::Delimited { tts: &src.tts, idx: 0, delim: src.delim, span }
     }
 }
@@ -150,14 +141,14 @@ pub(super) fn transcribe<'a>(
                 // We are done processing a Delimited. If this is the top-level delimited, we are
                 // done. Otherwise, we unwind the result_stack to append what we have produced to
                 // any previous results.
-                Frame::Delimited { delim_token, span, .. } => {
+                Frame::Delimited { delim, span, .. } => {
                     if result_stack.is_empty() {
                         // No results left to compute! We are back at the top-level.
                         return Ok(TokenStream::new(result));
                    }
                    // Step back into the parent Delimited.
-                    let tree = TokenTree::Delimited(span, delim_token, TokenStream::new(result));
+                    let tree = TokenTree::Delimited(span, delim, TokenStream::new(result));
                    result = result_stack.pop().unwrap();
                    result.push(tree.into());
                }
@@ -240,7 +231,7 @@ pub(super) fn transcribe<'a>(
                    }
                    MatchedNonterminal(ref nt) => {
                        // Other variables are emitted into the output stream as groups with
-                        // `Delimiter::None` to maintain parsing priorities.
+                        // `Delimiter::Invisible` to maintain parsing priorities.
                        // `Interpolated` is currently used for such groups in rustc parser.
                        marker.visit_span(&mut sp);
                        let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
@@ -278,7 +269,7 @@ pub(super) fn transcribe<'a>(
                mut_visit::visit_delim_span(&mut span, &mut marker);
                stack.push(Frame::Delimited {
                    tts: &delimited.tts,
-                    delim_token: delimited.delim,
+                    delim: delimited.delim,
                    idx: 0,
                    span,
                });

======================== changed file ========================

@@ -1,7 +1,7 @@
 use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Token};
+use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
 use rustc_ast::visit;
 use rustc_ast::{self as ast, PatKind};
@@ -77,13 +77,14 @@ fn string_to_tts_macro() {
                 TokenTree::Delimited(_, first_delim, first_tts),
                 TokenTree::Token(Token { kind: token::FatArrow, .. }),
                 TokenTree::Delimited(_, second_delim, second_tts),
-            ] if macro_delim == &token::Paren => {
+            ] if macro_delim == &Delimiter::Parenthesis => {
                 let tts = &first_tts.trees().collect::<Vec<_>>();
                 match &tts[..] {
                     [
                         TokenTree::Token(Token { kind: token::Dollar, .. }),
                         TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
-                    ] if first_delim == &token::Paren && name.as_str() == "a" => {}
+                    ] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {
+                    }
                     _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                 }
                 let tts = &second_tts.trees().collect::<Vec<_>>();
@@ -91,7 +92,8 @@ fn string_to_tts_macro() {
                     [
                         TokenTree::Token(Token { kind: token::Dollar, .. }),
                         TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
-                    ] if second_delim == &token::Paren && name.as_str() == "a" => {}
+                    ] if second_delim == &Delimiter::Parenthesis
+                        && name.as_str() == "a" => {}
                     _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                 }
             }
@@ -113,7 +115,7 @@ fn string_to_tts_1() {
             TokenTree::token(token::Ident(Symbol::intern("a"), false), sp(3, 4)).into(),
             TokenTree::Delimited(
                 DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
-                token::DelimToken::Paren,
+                Delimiter::Parenthesis,
                 TokenStream::new(vec![
                     TokenTree::token(token::Ident(Symbol::intern("b"), false), sp(6, 7)).into(),
                     TokenTree::token(token::Colon, sp(8, 9)).into(),
@@ -124,7 +126,7 @@ fn string_to_tts_1() {
             .into(),
             TokenTree::Delimited(
                 DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
-                token::DelimToken::Brace,
+                Delimiter::Brace,
                 TokenStream::new(vec![
                     TokenTree::token(token::Ident(Symbol::intern("b"), false), sp(17, 18)).into(),
                     TokenTree::token(token::Semi, sp(18, 19)).into(),

======================== changed file ========================

@@ -28,24 +28,24 @@ trait ToInternal<T> {
     fn to_internal(self) -> T;
 }
-impl FromInternal<token::DelimToken> for Delimiter {
-    fn from_internal(delim: token::DelimToken) -> Delimiter {
+impl FromInternal<token::Delimiter> for Delimiter {
+    fn from_internal(delim: token::Delimiter) -> Delimiter {
         match delim {
-            token::Paren => Delimiter::Parenthesis,
-            token::Brace => Delimiter::Brace,
-            token::Bracket => Delimiter::Bracket,
-            token::NoDelim => Delimiter::None,
+            token::Delimiter::Parenthesis => Delimiter::Parenthesis,
+            token::Delimiter::Brace => Delimiter::Brace,
+            token::Delimiter::Bracket => Delimiter::Bracket,
+            token::Delimiter::Invisible => Delimiter::None,
         }
     }
 }
-impl ToInternal<token::DelimToken> for Delimiter {
-    fn to_internal(self) -> token::DelimToken {
+impl ToInternal<token::Delimiter> for Delimiter {
+    fn to_internal(self) -> token::Delimiter {
         match self {
-            Delimiter::Parenthesis => token::Paren,
-            Delimiter::Brace => token::Brace,
-            Delimiter::Bracket => token::Bracket,
-            Delimiter::None => token::NoDelim,
+            Delimiter::Parenthesis => token::Delimiter::Parenthesis,
+            Delimiter::Brace => token::Delimiter::Brace,
+            Delimiter::Bracket => token::Delimiter::Bracket,
+            Delimiter::None => token::Delimiter::Invisible,
         }
     }
 }
@@ -61,7 +61,7 @@ fn from_internal(
         let joint = spacing == Joint;
         let Token { kind, span } = match tree {
             tokenstream::TokenTree::Delimited(span, delim, tts) => {
-                let delimiter = Delimiter::from_internal(delim);
+                let delimiter = pm::Delimiter::from_internal(delim);
                 return TokenTree::Group(Group { delimiter, stream: tts, span, flatten: false });
             }
             tokenstream::TokenTree::Token(token) => token,
@@ -164,7 +164,7 @@ macro_rules! op {
                     .map(|kind| tokenstream::TokenTree::token(kind, span))
                     .collect();
                 stack.push(TokenTree::Group(Group {
-                    delimiter: Delimiter::Bracket,
+                    delimiter: pm::Delimiter::Bracket,
                     stream,
                     span: DelimSpan::from_single(span),
                     flatten: false,
@@ -181,7 +181,7 @@ macro_rules! op {
            Interpolated(nt) => {
                let stream = nt_to_tokenstream(&nt, rustc.sess(), CanSynthesizeMissingTokens::No);
                TokenTree::Group(Group {
-                    delimiter: Delimiter::None,
+                    delimiter: pm::Delimiter::None,
                    stream,
                    span: DelimSpan::from_single(span),
                    flatten: crate::base::pretty_printing_compatibility_hack(&nt, rustc.sess()),

======================== changed file ========================

@@ -1,6 +1,6 @@
 use crate::lexer::unicode_chars::UNICODE_ARRAY;
 use rustc_ast::ast::{self, AttrStyle};
-use rustc_ast::token::{self, CommentKind, Token, TokenKind};
+use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{Spacing, TokenStream};
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
 use rustc_errors::{error_code, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
@@ -24,8 +24,8 @@
 #[derive(Clone, Debug)]
 pub struct UnmatchedBrace {
-    pub expected_delim: token::DelimToken,
-    pub found_delim: Option<token::DelimToken>,
+    pub expected_delim: Delimiter,
+    pub found_delim: Option<Delimiter>,
     pub found_span: Span,
     pub unclosed_span: Option<Span>,
     pub candidate_span: Option<Span>,
@@ -284,12 +284,12 @@ fn cook_lexer_token(&self, token: rustc_lexer::TokenKind, start: BytePos) -> Opt
             rustc_lexer::TokenKind::Semi => token::Semi,
             rustc_lexer::TokenKind::Comma => token::Comma,
             rustc_lexer::TokenKind::Dot => token::Dot,
-            rustc_lexer::TokenKind::OpenParen => token::OpenDelim(token::Paren),
-            rustc_lexer::TokenKind::CloseParen => token::CloseDelim(token::Paren),
-            rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(token::Brace),
-            rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(token::Brace),
-            rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(token::Bracket),
-            rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(token::Bracket),
+            rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis),
+            rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis),
+            rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace),
+            rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace),
+            rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket),
+            rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket),
             rustc_lexer::TokenKind::At => token::At,
             rustc_lexer::TokenKind::Pound => token::Pound,
             rustc_lexer::TokenKind::Tilde => token::Tilde,

======================== changed file ========================

@@ -1,6 +1,6 @@
 use super::{StringReader, UnmatchedBrace};
-use rustc_ast::token::{self, DelimToken, Token};
+use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::tokenstream::{
     DelimSpan,
     Spacing::{self, *},
@@ -32,15 +32,15 @@ struct TokenTreesReader<'a> {
     string_reader: StringReader<'a>,
     token: Token,
     /// Stack of open delimiters and their spans. Used for error message.
-    open_braces: Vec<(token::DelimToken, Span)>,
+    open_braces: Vec<(Delimiter, Span)>,
     unmatched_braces: Vec<UnmatchedBrace>,
     /// The type and spans for all braces
     ///
     /// Used only for error recovery when arriving to EOF with mismatched braces.
-    matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
+    matching_delim_spans: Vec<(Delimiter, Span, Span)>,
     last_unclosed_found_span: Option<Span>,
     /// Collect empty block spans that might have been auto-inserted by editors.
-    last_delim_empty_block_spans: FxHashMap<token::DelimToken, Span>,
+    last_delim_empty_block_spans: FxHashMap<Delimiter, Span>,
     /// Collect the spans of braces (Open, Close). Used only
     /// for detecting if blocks are empty and only braces.
     matching_block_spans: Vec<(Span, Span)>,
@@ -88,7 +88,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndSpacing> {
                 for &(_, sp) in &self.open_braces {
                     err.span_label(sp, "unclosed delimiter");
                     self.unmatched_braces.push(UnmatchedBrace {
-                        expected_delim: token::DelimToken::Brace,
+                        expected_delim: Delimiter::Brace,
                         found_delim: None,
                         found_span: self.token.span,
                         unclosed_span: Some(sp),
@@ -150,7 +150,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndSpacing> {
                }
                //only add braces
-                if let (DelimToken::Brace, DelimToken::Brace) = (open_brace, delim) {
+                if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, delim) {
                    self.matching_block_spans.push((open_brace_span, close_brace_span));
                }

======================== changed file ========================

@@ -2,7 +2,7 @@
 // https://www.unicode.org/Public/security/10.0.0/confusables.txt
 use super::StringReader;
-use crate::token;
+use crate::token::{self, Delimiter};
 use rustc_errors::{Applicability, Diagnostic};
 use rustc_span::{symbol::kw, BytePos, Pos, Span};
@@ -312,12 +312,12 @@
     ('!', "Exclamation Mark", Some(token::Not)),
     ('?', "Question Mark", Some(token::Question)),
     ('.', "Period", Some(token::Dot)),
-    ('(', "Left Parenthesis", Some(token::OpenDelim(token::Paren))),
-    (')', "Right Parenthesis", Some(token::CloseDelim(token::Paren))),
-    ('[', "Left Square Bracket", Some(token::OpenDelim(token::Bracket))),
-    (']', "Right Square Bracket", Some(token::CloseDelim(token::Bracket))),
-    ('{', "Left Curly Brace", Some(token::OpenDelim(token::Brace))),
-    ('}', "Right Curly Brace", Some(token::CloseDelim(token::Brace))),
+    ('(', "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
+    (')', "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
+    ('[', "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
+    (']', "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
+    ('{', "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
+    ('}', "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
     ('*', "Asterisk", Some(token::BinOp(token::Star))),
     ('/', "Slash", Some(token::BinOp(token::Slash))),
     ('\\', "Backslash", None),

======================== changed file ========================

@@ -1,7 +1,7 @@
 use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle};
 use rustc_ast as ast;
 use rustc_ast::attr;
-use rustc_ast::token::{self, Nonterminal};
+use rustc_ast::token::{self, Delimiter, Nonterminal};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{error_code, Diagnostic, PResult};
 use rustc_span::{sym, BytePos, Span};
@@ -130,9 +130,9 @@ pub fn parse_attribute(
                 ast::AttrStyle::Outer
             };
-            this.expect(&token::OpenDelim(token::Bracket))?;
+            this.expect(&token::OpenDelim(Delimiter::Bracket))?;
             let item = this.parse_attr_item(false)?;
-            this.expect(&token::CloseDelim(token::Bracket))?;
+            this.expect(&token::CloseDelim(Delimiter::Bracket))?;
             let attr_sp = lo.to(this.prev_token.span);
             // Emit error if inner attribute is encountered and forbidden.
@@ -403,7 +403,7 @@ pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
     crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
         Ok(if self.eat(&token::Eq) {
             ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
-        } else if self.check(&token::OpenDelim(token::Paren)) {
+        } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             // Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
             let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
             ast::MetaItemKind::List(list)

======================== changed file ========================

@@ -1,5 +1,5 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
-use rustc_ast::token::{self, DelimToken, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttributesData, CreateTokenStream};
 use rustc_ast::tokenstream::{AttrAnnotatedTokenTree, DelimSpan, LazyTokenStream, Spacing};
 use rustc_ast::{self as ast};
@@ -388,11 +388,11 @@ pub fn collect_tokens_trailing_token<R: AstLike>(
 /// Converts a flattened iterator of tokens (including open and close delimiter tokens)
 /// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
 /// of open and close delims.
-// FIXME(#67062): Currently, we don't parse `None`-delimited groups correctly,
-// which can cause us to end up with mismatched `None` delimiters in our
+// FIXME(#67062): Currently, we don't parse `Invisible`-delimited groups correctly,
+// which can cause us to end up with mismatched `Invisible` delimiters in our
 // captured tokens. This function contains several hacks to work around this -
-// essentially, we throw away mismatched `None` delimiters when we encounter them.
-// Once we properly parse `None` delimiters, they can be captured just like any
+// essentially, we throw away mismatched `Invisible` delimiters when we encounter them.
+// Once we properly parse `Invisible` delimiters, they can be captured just like any
 // other tokens, and these hacks can be removed.
 fn make_token_stream(
     mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
@@ -401,7 +401,7 @@ fn make_token_stream(
     #[derive(Debug)]
     struct FrameData {
         // This is `None` for the first frame, `Some` for all others.
-        open_delim_sp: Option<(DelimToken, Span)>,
+        open_delim_sp: Option<(Delimiter, Span)>,
         inner: Vec<(AttrAnnotatedTokenTree, Spacing)>,
     }
     let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
@@ -412,13 +412,13 @@ struct FrameData {
                 stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
             }
             FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
-                // HACK: If we encounter a mismatched `None` delimiter at the top
+                // HACK: If we encounter a mismatched `Invisible` delimiter at the top
                 // level, just ignore it.
-                if matches!(delim, DelimToken::NoDelim)
+                if matches!(delim, Delimiter::Invisible)
                     && (stack.len() == 1
                         || !matches!(
                             stack.last_mut().unwrap().open_delim_sp.unwrap().0,
-                            DelimToken::NoDelim
+                            Delimiter::Invisible
                        ))
                {
                    token_and_spacing = iter.next();
@@ -428,11 +428,11 @@ struct FrameData {
                    .pop()
                    .unwrap_or_else(|| panic!("Token stack was empty for token: {:?}", token));
-                // HACK: If our current frame has a mismatched opening `None` delimiter,
+                // HACK: If our current frame has a mismatched opening `Invisible` delimiter,
                // merge our current frame with the one above it. That is, transform
                // `[ { < first second } third ]` into `[ { first second } third ]`
-                if !matches!(delim, DelimToken::NoDelim)
-                    && matches!(frame_data.open_delim_sp.unwrap().0, DelimToken::NoDelim)
+                if !matches!(delim, Delimiter::Invisible)
+                    && matches!(frame_data.open_delim_sp.unwrap().0, Delimiter::Invisible)
                {
                    stack.last_mut().unwrap().inner.extend(frame_data.inner);
                    // Process our closing delimiter again, this time at the previous
@@ -472,10 +472,10 @@ struct FrameData {
        }
        token_and_spacing = iter.next();
    }
-    // HACK: If we don't have a closing `None` delimiter for our last
+    // HACK: If we don't have a closing `Invisible` delimiter for our last
    // frame, merge the frame with the top-level frame. That is,
    // turn `< first second` into `first second`
-    if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == DelimToken::NoDelim {
+    if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == Delimiter::Invisible {
        let temp_buf = stack.pop().unwrap();
        stack.last_mut().unwrap().inner.extend(temp_buf.inner);
    }

======================== changed file ========================

@@ -8,7 +8,7 @@
 use crate::lexer::UnmatchedBrace;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Lit, LitKind, TokenKind};
+use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
 use rustc_ast::util::parser::AssocOp;
 use rustc_ast::{
     AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block,
@@ -337,10 +337,10 @@ pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a, ErrorGuarante
             TokenKind::Comma,
             TokenKind::Semi,
             TokenKind::ModSep,
-            TokenKind::OpenDelim(token::DelimToken::Brace),
-            TokenKind::OpenDelim(token::DelimToken::Paren),
-            TokenKind::CloseDelim(token::DelimToken::Brace),
-            TokenKind::CloseDelim(token::DelimToken::Paren),
+            TokenKind::OpenDelim(Delimiter::Brace),
+            TokenKind::OpenDelim(Delimiter::Parenthesis),
+            TokenKind::CloseDelim(Delimiter::Brace),
+            TokenKind::CloseDelim(Delimiter::Parenthesis),
         ];
         match self.token.ident() {
             Some((ident, false))
@@ -413,7 +413,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
         } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
             // The current token is in the same line as the prior token, not recoverable.
         } else if [token::Comma, token::Colon].contains(&self.token.kind)
-            && self.prev_token.kind == token::CloseDelim(token::Paren)
+            && self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis)
         {
             // Likely typo: The current token is on a new line and is expected to be
             // `.`, `;`, `?`, or an operator after a close delimiter token.
@@ -424,7 +424,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
             // ^
             // https://github.com/rust-lang/rust/issues/72253
         } else if self.look_ahead(1, |t| {
-            t == &token::CloseDelim(token::Brace)
+            t == &token::CloseDelim(Delimiter::Brace)
                 || t.can_begin_expr() && t.kind != token::Colon
         }) && [token::Comma, token::Colon].contains(&self.token.kind)
         {
@@ -441,7 +441,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
                 .emit();
             return Ok(true);
         } else if self.look_ahead(0, |t| {
-            t == &token::CloseDelim(token::Brace)
+            t == &token::CloseDelim(Delimiter::Brace)
                 || (t.can_begin_expr() && t != &token::Semi && t != &token::Pound)
                 // Avoid triggering with too many trailing `#` in raw string.
                 || (sm.is_multiline(
@@ -655,7 +655,7 @@ pub fn maybe_suggest_struct_literal(
             (Err(snapshot_err), Err(err)) => {
                 // We don't know what went wrong, emit the normal error.
                 snapshot_err.cancel();
-                self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
+                self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
                 Err(err)
             }
             (Ok(_), Ok(mut tail)) => {
@@ -866,7 +866,7 @@ pub(super) fn check_turbofish_missing_angle_brackets(&mut self, segment: &mut Pa
             trailing_span = trailing_span.to(self.token.span);
             self.bump();
         }
-        if self.token.kind == token::OpenDelim(token::Paren) {
+        if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
// Recover from bad turbofish: `foo.collect::Vec<_>()`. // Recover from bad turbofish: `foo.collect::Vec<_>()`.
let args = AngleBracketedArgs { args, span }.into(); let args = AngleBracketedArgs { args, span }.into();
segment.args = args; segment.args = args;
@ -1098,7 +1098,7 @@ pub(super) fn check_no_chained_comparison(
[(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)]; [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
self.consume_tts(1, &modifiers); self.consume_tts(1, &modifiers);
if !&[token::OpenDelim(token::Paren), token::ModSep] if !&[token::OpenDelim(Delimiter::Parenthesis), token::ModSep]
.contains(&self.token.kind) .contains(&self.token.kind)
{ {
// We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
@ -1132,7 +1132,7 @@ pub(super) fn check_no_chained_comparison(
Err(err) Err(err)
} }
} }
} else if token::OpenDelim(token::Paren) == self.token.kind { } else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
// We have high certainty that this was a bad turbofish at this point. // We have high certainty that this was a bad turbofish at this point.
// `foo< bar >(` // `foo< bar >(`
suggest(&mut err); suggest(&mut err);
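
For context, the "bad turbofish" recovery above targets user code like the following (an illustrative snippet, not part of this diff), where generic arguments are written without the `::<...>` angle brackets:

fn main() {
    let xs = [1, 2, 3];
    // let v = xs.iter().copied().collect::Vec<i32>();  // bad turbofish, triggers the recovery
    let v = xs.iter().copied().collect::<Vec<i32>>();   // the suggested form
    println!("{:?}", v);
}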
@ -1186,8 +1186,10 @@ fn consume_fn_args(&mut self) -> Result<(), ()> {
self.bump(); // `(` self.bump(); // `(`
// Consume the fn call arguments. // Consume the fn call arguments.
let modifiers = let modifiers = [
[(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)]; (token::OpenDelim(Delimiter::Parenthesis), 1),
(token::CloseDelim(Delimiter::Parenthesis), -1),
];
self.consume_tts(1, &modifiers); self.consume_tts(1, &modifiers);
if self.token.kind == token::Eof { if self.token.kind == token::Eof {
@ -1579,15 +1581,15 @@ pub(super) fn recover_incorrect_await_syntax(
fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> { fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
self.expect(&token::Not)?; self.expect(&token::Not)?;
self.expect(&token::OpenDelim(token::Paren))?; self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let expr = self.parse_expr()?; let expr = self.parse_expr()?;
self.expect(&token::CloseDelim(token::Paren))?; self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
Ok((self.prev_token.span, expr, false)) Ok((self.prev_token.span, expr, false))
} }
fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> { fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
let is_question = self.eat(&token::Question); // Handle `await? <expr>`. let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
let expr = if self.token == token::OpenDelim(token::Brace) { let expr = if self.token == token::OpenDelim(Delimiter::Brace) {
// Handle `await { <expr> }`. // Handle `await { <expr> }`.
// This needs to be handled separately from the next arm to avoid // This needs to be handled separately from the next arm to avoid
// interpreting `await { <expr> }?` as `<expr>?.await`. // interpreting `await { <expr> }?` as `<expr>?.await`.
@ -1619,8 +1621,8 @@ fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question:
/// If encountering `future.await()`, consumes and emits an error. /// If encountering `future.await()`, consumes and emits an error.
pub(super) fn recover_from_await_method_call(&mut self) { pub(super) fn recover_from_await_method_call(&mut self) {
if self.token == token::OpenDelim(token::Paren) if self.token == token::OpenDelim(Delimiter::Parenthesis)
&& self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren)) && self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
{ {
// future.await() // future.await()
let lo = self.token.span; let lo = self.token.span;
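
As a reminder of the user-level syntax these `await` recoveries are matching (illustration only; the commented-out lines do not compile), each pre-stabilization spelling is rewritten to the postfix form:

async fn demo(fut: impl std::future::Future<Output = u8>) -> u8 {
    // await!(fut)   // macro-style call handled by recover_await_macro
    // await fut     // prefix form handled by recover_await_prefix
    // fut.await()   // method-call form handled by recover_from_await_method_call
    fut.await        // the accepted syntax
}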
@ -1641,7 +1643,7 @@ pub(super) fn recover_from_await_method_call(&mut self) {
pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> { pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
let is_try = self.token.is_keyword(kw::Try); let is_try = self.token.is_keyword(kw::Try);
let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for ! let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for !
let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(token::Paren)); //check for ( let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for (
if is_try && is_questionmark && is_open { if is_try && is_questionmark && is_open {
let lo = self.token.span; let lo = self.token.span;
@ -1649,8 +1651,8 @@ pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
self.bump(); //remove ! self.bump(); //remove !
let try_span = lo.to(self.token.span); //we take the try!( span let try_span = lo.to(self.token.span); //we take the try!( span
self.bump(); //remove ( self.bump(); //remove (
let is_empty = self.token == token::CloseDelim(token::Paren); //check if the block is empty let is_empty = self.token == token::CloseDelim(Delimiter::Parenthesis); //check if the block is empty
self.consume_block(token::Paren, ConsumeClosingDelim::No); //eat the block self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::No); //eat the block
let hi = self.token.span; let hi = self.token.span;
self.bump(); //remove ) self.bump(); //remove )
let mut err = self.struct_span_err(lo.to(hi), "use of deprecated `try` macro"); let mut err = self.struct_span_err(lo.to(hi), "use of deprecated `try` macro");
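
The pattern detected here is the old 2015-edition `try!` macro, which collides with the reserved `try` keyword on later editions. A hedged user-level illustration of the diagnostic and its suggested replacement:

use std::num::ParseIntError;

fn parse_trimmed(s: &str) -> Result<i32, ParseIntError> {
    // let n = try!(s.trim().parse::<i32>());  // "use of deprecated `try` macro"
    let n = s.trim().parse::<i32>()?;          // the suggested `?` operator
    Ok(n)
}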
@ -1681,7 +1683,7 @@ pub(super) fn recover_parens_around_for_head(
begin_paren: Option<Span>, begin_paren: Option<Span>,
) -> P<Pat> { ) -> P<Pat> {
match (&self.token.kind, begin_paren) { match (&self.token.kind, begin_paren) {
(token::CloseDelim(token::Paren), Some(begin_par_sp)) => { (token::CloseDelim(Delimiter::Parenthesis), Some(begin_par_sp)) => {
self.bump(); self.bump();
self.struct_span_err( self.struct_span_err(
@ -1714,8 +1716,8 @@ pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
|| self.token.is_ident() && || self.token.is_ident() &&
matches!(node, ast::ExprKind::Path(..) | ast::ExprKind::Field(..)) && matches!(node, ast::ExprKind::Path(..) | ast::ExprKind::Field(..)) &&
!self.token.is_reserved_ident() && // v `foo:bar(baz)` !self.token.is_reserved_ident() && // v `foo:bar(baz)`
self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren)) self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Parenthesis))
|| self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) // `foo:bar {` || self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace)) // `foo:bar {`
|| self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz` || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz`
self.look_ahead(2, |t| t == &token::Lt) && self.look_ahead(2, |t| t == &token::Lt) &&
self.look_ahead(3, |t| t.is_ident()) self.look_ahead(3, |t| t.is_ident())
@ -1728,7 +1730,7 @@ pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
pub(super) fn recover_seq_parse_error( pub(super) fn recover_seq_parse_error(
&mut self, &mut self,
delim: token::DelimToken, delim: Delimiter,
lo: Span, lo: Span,
result: PResult<'a, P<Expr>>, result: PResult<'a, P<Expr>>,
) -> P<Expr> { ) -> P<Expr> {
@ -1845,7 +1847,7 @@ pub(super) fn recover_stmt_(
loop { loop {
debug!("recover_stmt_ loop {:?}", self.token); debug!("recover_stmt_ loop {:?}", self.token);
match self.token.kind { match self.token.kind {
token::OpenDelim(token::DelimToken::Brace) => { token::OpenDelim(Delimiter::Brace) => {
brace_depth += 1; brace_depth += 1;
self.bump(); self.bump();
if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0 if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0
@ -1853,11 +1855,11 @@ pub(super) fn recover_stmt_(
in_block = true; in_block = true;
} }
} }
token::OpenDelim(token::DelimToken::Bracket) => { token::OpenDelim(Delimiter::Bracket) => {
bracket_depth += 1; bracket_depth += 1;
self.bump(); self.bump();
} }
token::CloseDelim(token::DelimToken::Brace) => { token::CloseDelim(Delimiter::Brace) => {
if brace_depth == 0 { if brace_depth == 0 {
debug!("recover_stmt_ return - close delim {:?}", self.token); debug!("recover_stmt_ return - close delim {:?}", self.token);
break; break;
@ -1869,7 +1871,7 @@ pub(super) fn recover_stmt_(
break; break;
} }
} }
token::CloseDelim(token::DelimToken::Bracket) => { token::CloseDelim(Delimiter::Bracket) => {
bracket_depth -= 1; bracket_depth -= 1;
if bracket_depth < 0 { if bracket_depth < 0 {
bracket_depth = 0; bracket_depth = 0;
@ -1927,11 +1929,11 @@ pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
.emit(); .emit();
self.bump(); self.bump();
} else if self.token == token::Pound } else if self.token == token::Pound
&& self.look_ahead(1, |t| *t == token::OpenDelim(token::Bracket)) && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
{ {
let lo = self.token.span; let lo = self.token.span;
// Skip every token until next possible arg. // Skip every token until next possible arg.
while self.token != token::CloseDelim(token::Bracket) { while self.token != token::CloseDelim(Delimiter::Bracket) {
self.bump(); self.bump();
} }
let sp = lo.to(self.token.span); let sp = lo.to(self.token.span);
@ -1952,7 +1954,9 @@ pub(super) fn parameter_without_type(
// If we find a pattern followed by an identifier, it could be an (incorrect) // If we find a pattern followed by an identifier, it could be an (incorrect)
// C-style parameter declaration. // C-style parameter declaration.
if self.check_ident() if self.check_ident()
&& self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseDelim(token::Paren)) && self.look_ahead(1, |t| {
*t == token::Comma || *t == token::CloseDelim(Delimiter::Parenthesis)
})
{ {
// `fn foo(String s) {}` // `fn foo(String s) {}`
let ident = self.parse_ident().unwrap(); let ident = self.parse_ident().unwrap();
@ -1968,7 +1972,7 @@ pub(super) fn parameter_without_type(
} else if require_name } else if require_name
&& (self.token == token::Comma && (self.token == token::Comma
|| self.token == token::Lt || self.token == token::Lt
|| self.token == token::CloseDelim(token::Paren)) || self.token == token::CloseDelim(Delimiter::Parenthesis))
{ {
let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)"; let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)";
@ -2086,11 +2090,7 @@ pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a,
Ok(param) Ok(param)
} }
pub(super) fn consume_block( pub(super) fn consume_block(&mut self, delim: Delimiter, consume_close: ConsumeClosingDelim) {
&mut self,
delim: token::DelimToken,
consume_close: ConsumeClosingDelim,
) {
let mut brace_depth = 0; let mut brace_depth = 0;
loop { loop {
if self.eat(&token::OpenDelim(delim)) { if self.eat(&token::OpenDelim(delim)) {
@ -2109,7 +2109,8 @@ pub(super) fn consume_block(
brace_depth -= 1; brace_depth -= 1;
continue; continue;
} }
} else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) { } else if self.token == token::Eof || self.eat(&token::CloseDelim(Delimiter::Invisible))
{
return; return;
} else { } else {
self.bump(); self.bump();
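
The same depth-counting idea, as a standalone sketch over characters rather than real tokens (`consume_braced_block` and its parameters are invented for this illustration; the real method also handles arbitrary delimiters and invisible ones):

fn consume_braced_block(chars: &mut std::iter::Peekable<std::str::Chars<'_>>, consume_close: bool) {
    let mut depth = 0usize;
    while let Some(&c) = chars.peek() {
        match c {
            '{' => { depth += 1; chars.next(); }
            '}' => {
                if depth == 0 { return; }                    // stray close, nothing to consume
                if depth == 1 && !consume_close { return; }  // like ConsumeClosingDelim::No
                chars.next();
                depth -= 1;
                if depth == 0 { return; }
            }
            _ => { chars.next(); }
        }
    }
    // Running out of input corresponds to the `token::Eof` bail-out above.
}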
@ -2555,7 +2556,7 @@ pub(super) fn incorrect_move_async_order_found(
crate fn maybe_recover_unexpected_block_label(&mut self) -> bool { crate fn maybe_recover_unexpected_block_label(&mut self) -> bool {
let Some(label) = self.eat_label().filter(|_| { let Some(label) = self.eat_label().filter(|_| {
self.eat(&token::Colon) && self.token.kind == token::OpenDelim(token::Brace) self.eat(&token::Colon) && self.token.kind == token::OpenDelim(Delimiter::Brace)
}) else { }) else {
return false; return false;
}; };
@ -2652,7 +2653,7 @@ pub(super) fn incorrect_move_async_order_found(
/// Parse and throw away a parenthesized comma separated /// Parse and throw away a parenthesized comma separated
/// sequence of patterns until `)` is reached. /// sequence of patterns until `)` is reached.
fn skip_pat_list(&mut self) -> PResult<'a, ()> { fn skip_pat_list(&mut self) -> PResult<'a, ()> {
while !self.check(&token::CloseDelim(token::Paren)) { while !self.check(&token::CloseDelim(Delimiter::Parenthesis)) {
self.parse_pat_no_top_alt(None)?; self.parse_pat_no_top_alt(None)?;
if !self.eat(&token::Comma) { if !self.eat(&token::Comma) {
return Ok(()); return Ok(());


@ -7,9 +7,8 @@
}; };
use crate::maybe_recover_from_interpolated_ty_qpath; use crate::maybe_recover_from_interpolated_ty_qpath;
use ast::token::DelimToken;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token, TokenKind}; use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::Spacing; use rustc_ast::tokenstream::Spacing;
use rustc_ast::util::classify; use rustc_ast::util::classify;
use rustc_ast::util::literal::LitError; use rustc_ast::util::literal::LitError;
@ -495,7 +494,7 @@ fn parse_range_expr(
fn is_at_start_of_range_notation_rhs(&self) -> bool { fn is_at_start_of_range_notation_rhs(&self) -> bool {
if self.token.can_begin_expr() { if self.token.can_begin_expr() {
// Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`. // Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
if self.token == token::OpenDelim(token::Brace) { if self.token == token::OpenDelim(Delimiter::Brace) {
return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
} }
true true
@ -992,8 +991,8 @@ fn parse_dot_or_call_expr_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<
return Ok(e); return Ok(e);
} }
e = match self.token.kind { e = match self.token.kind {
token::OpenDelim(token::Paren) => self.parse_fn_call_expr(lo, e), token::OpenDelim(Delimiter::Parenthesis) => self.parse_fn_call_expr(lo, e),
token::OpenDelim(token::Bracket) => self.parse_index_expr(lo, e)?, token::OpenDelim(Delimiter::Bracket) => self.parse_index_expr(lo, e)?,
_ => return Ok(e), _ => return Ok(e),
} }
} }
@ -1156,7 +1155,7 @@ fn parse_tuple_field_access_expr(
/// Parse a function call expression, `expr(...)`. /// Parse a function call expression, `expr(...)`.
fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> { fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
let snapshot = if self.token.kind == token::OpenDelim(token::Paren) let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
&& self.look_ahead_type_ascription_as_field() && self.look_ahead_type_ascription_as_field()
{ {
Some((self.create_snapshot_for_diagnostic(), fun.kind.clone())) Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
@ -1173,7 +1172,7 @@ fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
{ {
return expr; return expr;
} }
self.recover_seq_parse_error(token::Paren, lo, seq) self.recover_seq_parse_error(Delimiter::Parenthesis, lo, seq)
} }
/// If we encounter a parser state that looks like the user has written a `struct` literal with /// If we encounter a parser state that looks like the user has written a `struct` literal with
@ -1190,8 +1189,10 @@ fn maybe_recover_struct_lit_bad_delims(
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => { (Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
let name = pprust::path_to_string(&path); let name = pprust::path_to_string(&path);
snapshot.bump(); // `(` snapshot.bump(); // `(`
match snapshot.parse_struct_fields(path, false, token::Paren) { match snapshot.parse_struct_fields(path, false, Delimiter::Parenthesis) {
Ok((fields, ..)) if snapshot.eat(&token::CloseDelim(token::Paren)) => { Ok((fields, ..))
if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) =>
{
// We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest // We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest
// `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`. // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
self.restore_snapshot(snapshot); self.restore_snapshot(snapshot);
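
The recovery just above fires on user code of the following shape (illustrative only; `Shape` is a made-up type), where brace-style named fields are written inside parentheses:

enum Shape {
    Rect { w: u32, h: u32 },
}

fn main() {
    // let r = Shape::Rect(w: 3, h: 4);   // recovered: suggests one of the two valid forms
    let r = Shape::Rect { w: 3, h: 4 };   // the brace form with named fields
    let _ = r;
}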
@ -1241,7 +1242,7 @@ fn maybe_recover_struct_lit_bad_delims(
fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> { fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
self.bump(); // `[` self.bump(); // `[`
let index = self.parse_expr()?; let index = self.parse_expr()?;
self.expect(&token::CloseDelim(token::Bracket))?; self.expect(&token::CloseDelim(Delimiter::Bracket))?;
Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_index(base, index), AttrVec::new())) Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_index(base, index), AttrVec::new()))
} }
@ -1253,10 +1254,10 @@ fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Exp
let fn_span_lo = self.token.span; let fn_span_lo = self.token.span;
let mut segment = self.parse_path_segment(PathStyle::Expr, None)?; let mut segment = self.parse_path_segment(PathStyle::Expr, None)?;
self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(token::Paren)]); self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(Delimiter::Parenthesis)]);
self.check_turbofish_missing_angle_brackets(&mut segment); self.check_turbofish_missing_angle_brackets(&mut segment);
if self.check(&token::OpenDelim(token::Paren)) { if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// Method call `expr.f()` // Method call `expr.f()`
let mut args = self.parse_paren_expr_seq()?; let mut args = self.parse_paren_expr_seq()?;
args.insert(0, self_arg); args.insert(0, self_arg);
@ -1302,9 +1303,9 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
// could be removed without changing functionality, but it's faster // could be removed without changing functionality, but it's faster
// to have it here, especially for programs with large constants. // to have it here, especially for programs with large constants.
self.parse_lit_expr(attrs) self.parse_lit_expr(attrs)
} else if self.check(&token::OpenDelim(token::Paren)) { } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
self.parse_tuple_parens_expr(attrs) self.parse_tuple_parens_expr(attrs)
} else if self.check(&token::OpenDelim(token::Brace)) { } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs) self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs)
} else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) { } else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) {
self.parse_closure_expr(attrs).map_err(|mut err| { self.parse_closure_expr(attrs).map_err(|mut err| {
@ -1315,8 +1316,8 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
} }
err err
}) })
} else if self.check(&token::OpenDelim(token::Bracket)) { } else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
self.parse_array_or_repeat_expr(attrs, token::Bracket) self.parse_array_or_repeat_expr(attrs, Delimiter::Bracket)
} else if self.check_path() { } else if self.check_path() {
self.parse_path_start_expr(attrs) self.parse_path_start_expr(attrs)
} else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) { } else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
@ -1422,14 +1423,16 @@ fn parse_lit_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
fn parse_tuple_parens_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> { fn parse_tuple_parens_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
let lo = self.token.span; let lo = self.token.span;
self.expect(&token::OpenDelim(token::Paren))?; self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let (es, trailing_comma) = match self.parse_seq_to_end( let (es, trailing_comma) = match self.parse_seq_to_end(
&token::CloseDelim(token::Paren), &token::CloseDelim(Delimiter::Parenthesis),
SeqSep::trailing_allowed(token::Comma), SeqSep::trailing_allowed(token::Comma),
|p| p.parse_expr_catch_underscore(), |p| p.parse_expr_catch_underscore(),
) { ) {
Ok(x) => x, Ok(x) => x,
Err(err) => return Ok(self.recover_seq_parse_error(token::Paren, lo, Err(err))), Err(err) => {
return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, Err(err)));
}
}; };
let kind = if es.len() == 1 && !trailing_comma { let kind = if es.len() == 1 && !trailing_comma {
// `(e)` is parenthesized `e`. // `(e)` is parenthesized `e`.
@ -1445,7 +1448,7 @@ fn parse_tuple_parens_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
fn parse_array_or_repeat_expr( fn parse_array_or_repeat_expr(
&mut self, &mut self,
attrs: AttrVec, attrs: AttrVec,
close_delim: token::DelimToken, close_delim: Delimiter,
) -> PResult<'a, P<Expr>> { ) -> PResult<'a, P<Expr>> {
let lo = self.token.span; let lo = self.token.span;
self.bump(); // `[` or other open delim self.bump(); // `[` or other open delim
@ -1500,7 +1503,7 @@ fn parse_path_start_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
prior_type_ascription: self.last_type_ascription, prior_type_ascription: self.last_type_ascription,
}; };
(self.prev_token.span, ExprKind::MacCall(mac)) (self.prev_token.span, ExprKind::MacCall(mac))
} else if self.check(&token::OpenDelim(token::Brace)) { } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
if let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path, &attrs) { if let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path, &attrs) {
if qself.is_some() { if qself.is_some() {
self.sess.gated_spans.gate(sym::more_qualified_paths, path.span); self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
@ -1533,7 +1536,7 @@ fn parse_labeled_expr(
self.parse_for_expr(label, lo, attrs) self.parse_for_expr(label, lo, attrs)
} else if self.eat_keyword(kw::Loop) { } else if self.eat_keyword(kw::Loop) {
self.parse_loop_expr(label, lo, attrs) self.parse_loop_expr(label, lo, attrs)
} else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() { } else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
self.parse_block_expr(label, lo, BlockCheckMode::Default, attrs) self.parse_block_expr(label, lo, BlockCheckMode::Default, attrs)
} else if !ate_colon && (self.check(&TokenKind::Comma) || self.check(&TokenKind::Gt)) { } else if !ate_colon && (self.check(&TokenKind::Comma) || self.check(&TokenKind::Gt)) {
// We're probably inside of a `Path<'a>` that needs a turbofish // We're probably inside of a `Path<'a>` that needs a turbofish
@ -1631,7 +1634,7 @@ fn parse_break_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
) )
.emit(); .emit();
Some(lexpr) Some(lexpr)
} else if self.token != token::OpenDelim(token::Brace) } else if self.token != token::OpenDelim(Delimiter::Brace)
|| !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
{ {
let expr = self.parse_expr_opt()?; let expr = self.parse_expr_opt()?;
@ -1940,7 +1943,7 @@ fn maybe_suggest_brackets_instead_of_braces(
attrs: AttrVec, attrs: AttrVec,
) -> Option<P<Expr>> { ) -> Option<P<Expr>> {
let mut snapshot = self.create_snapshot_for_diagnostic(); let mut snapshot = self.create_snapshot_for_diagnostic();
match snapshot.parse_array_or_repeat_expr(attrs, token::Brace) { match snapshot.parse_array_or_repeat_expr(attrs, Delimiter::Brace) {
Ok(arr) => { Ok(arr) => {
let hi = snapshot.prev_token.span; let hi = snapshot.prev_token.span;
self.struct_span_err(arr.span, "this is a block expression, not an array") self.struct_span_err(arr.span, "this is a block expression, not an array")
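
The diagnostic emitted here fires on user code of this shape (illustration, not from the diff):

fn main() {
    // let xs = {1, 2, 3};  // "this is a block expression, not an array"
    let xs = [1, 2, 3];     // suggested: square brackets
    assert_eq!(xs.len(), 3);
}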
@ -2044,7 +2047,7 @@ fn parse_closure_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
} }
if self.token.kind == TokenKind::Semi if self.token.kind == TokenKind::Semi
&& matches!(self.token_cursor.frame.delim_sp, Some((DelimToken::Paren, _))) && matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _)))
{ {
// It is likely that the closure body is a block but where the // It is likely that the closure body is a block but where the
// braces have been removed. We will recover and eat the next // braces have been removed. We will recover and eat the next
@ -2158,7 +2161,7 @@ fn parse_if_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
} }
} else { } else {
let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery. let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
let not_block = self.token != token::OpenDelim(token::Brace); let not_block = self.token != token::OpenDelim(Delimiter::Brace);
let block = self.parse_block().map_err(|err| { let block = self.parse_block().map_err(|err| {
if not_block { if not_block {
self.error_missing_if_then_block(lo, Some(err), missing_then_block_binop_span()) self.error_missing_if_then_block(lo, Some(err), missing_then_block_binop_span())
@ -2283,7 +2286,7 @@ fn parse_for_expr(
// This is used below for recovery in case of `for ( $stuff ) $block` // This is used below for recovery in case of `for ( $stuff ) $block`
// in which case we will suggest `for $stuff $block`. // in which case we will suggest `for $stuff $block`.
let begin_paren = match self.token.kind { let begin_paren = match self.token.kind {
token::OpenDelim(token::Paren) => Some(self.token.span), token::OpenDelim(Delimiter::Parenthesis) => Some(self.token.span),
_ => None, _ => None,
}; };
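
This is the `for ( $stuff ) $block` shape mentioned in the comment; written out as user code (illustrative), the recovery suggests dropping the parentheses:

fn main() {
    let xs = [1, 2, 3];
    // for (x in &xs) { println!("{x}"); }   // recovered: parentheses around the `for` head
    for x in &xs {
        println!("{x}");
    }
}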
@ -2372,7 +2375,7 @@ fn parse_match_expr(&mut self, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
let match_span = self.prev_token.span; let match_span = self.prev_token.span;
let lo = self.prev_token.span; let lo = self.prev_token.span;
let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { if let Err(mut e) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
if self.token == token::Semi { if self.token == token::Semi {
e.span_suggestion_short( e.span_suggestion_short(
match_span, match_span,
@ -2391,7 +2394,7 @@ fn parse_match_expr(&mut self, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
attrs.extend(self.parse_inner_attributes()?); attrs.extend(self.parse_inner_attributes()?);
let mut arms: Vec<Arm> = Vec::new(); let mut arms: Vec<Arm> = Vec::new();
while self.token != token::CloseDelim(token::Brace) { while self.token != token::CloseDelim(Delimiter::Brace) {
match self.parse_arm() { match self.parse_arm() {
Ok(arm) => arms.push(arm), Ok(arm) => arms.push(arm),
Err(mut e) => { Err(mut e) => {
@ -2399,7 +2402,7 @@ fn parse_match_expr(&mut self, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
e.emit(); e.emit();
self.recover_stmt(); self.recover_stmt();
let span = lo.to(self.token.span); let span = lo.to(self.token.span);
if self.token == token::CloseDelim(token::Brace) { if self.token == token::CloseDelim(Delimiter::Brace) {
self.bump(); self.bump();
} }
return Ok(self.mk_expr(span, ExprKind::Match(scrutinee, arms), attrs)); return Ok(self.mk_expr(span, ExprKind::Match(scrutinee, arms), attrs));
@ -2463,7 +2466,7 @@ fn parse_arm_body_missing_braces(
// We might have either a `,` -> `;` typo, or a block without braces. We need // We might have either a `,` -> `;` typo, or a block without braces. We need
// a more subtle parsing strategy. // a more subtle parsing strategy.
loop { loop {
if self.token.kind == token::CloseDelim(token::Brace) { if self.token.kind == token::CloseDelim(Delimiter::Brace) {
// We have reached the closing brace of the `match` expression. // We have reached the closing brace of the `match` expression.
return Some(err(self, stmts)); return Some(err(self, stmts));
} }
@ -2571,7 +2574,7 @@ fn check_let_expr(expr: &Expr) -> (bool, bool) {
})?; })?;
let require_comma = classify::expr_requires_semi_to_be_stmt(&expr) let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
&& this.token != token::CloseDelim(token::Brace); && this.token != token::CloseDelim(Delimiter::Brace);
let hi = this.prev_token.span; let hi = this.prev_token.span;
@ -2592,8 +2595,8 @@ fn check_let_expr(expr: &Expr) -> (bool, bool) {
TrailingToken::None, TrailingToken::None,
)); ));
} }
this.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]).map_err( this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
|mut err| { .map_err(|mut err| {
match (sm.span_to_lines(expr.span), sm.span_to_lines(arm_start_span)) { match (sm.span_to_lines(expr.span), sm.span_to_lines(arm_start_span)) {
(Ok(ref expr_lines), Ok(ref arm_start_lines)) (Ok(ref expr_lines), Ok(ref arm_start_lines))
if arm_start_lines.lines[0].end_col if arm_start_lines.lines[0].end_col
@ -2627,8 +2630,7 @@ fn check_let_expr(expr: &Expr) -> (bool, bool) {
} }
} }
err err
}, })?;
)?;
} else { } else {
this.eat(&token::Comma); this.eat(&token::Comma);
} }
@ -2670,13 +2672,13 @@ fn parse_try_block(&mut self, span_lo: Span, mut attrs: AttrVec) -> PResult<'a,
fn is_do_catch_block(&self) -> bool { fn is_do_catch_block(&self) -> bool {
self.token.is_keyword(kw::Do) self.token.is_keyword(kw::Do)
&& self.is_keyword_ahead(1, &[kw::Catch]) && self.is_keyword_ahead(1, &[kw::Catch])
&& self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
&& !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
} }
fn is_try_block(&self) -> bool { fn is_try_block(&self) -> bool {
self.token.is_keyword(kw::Try) self.token.is_keyword(kw::Try)
&& self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
&& self.token.uninterpolated_span().rust_2018() && self.token.uninterpolated_span().rust_2018()
} }
@ -2696,10 +2698,10 @@ fn is_async_block(&self) -> bool {
&& (( && ((
// `async move {` // `async move {`
self.is_keyword_ahead(1, &[kw::Move]) self.is_keyword_ahead(1, &[kw::Move])
&& self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
) || ( ) || (
// `async {` // `async {`
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
)) ))
} }
@ -2726,7 +2728,7 @@ fn maybe_parse_struct_expr(
) -> Option<PResult<'a, P<Expr>>> { ) -> Option<PResult<'a, P<Expr>>> {
let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
if struct_allowed || self.is_certainly_not_a_block() { if struct_allowed || self.is_certainly_not_a_block() {
if let Err(err) = self.expect(&token::OpenDelim(token::Brace)) { if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
return Some(Err(err)); return Some(Err(err));
} }
let expr = self.parse_struct_expr(qself.cloned(), path.clone(), attrs.clone(), true); let expr = self.parse_struct_expr(qself.cloned(), path.clone(), attrs.clone(), true);
@ -2753,7 +2755,7 @@ pub(super) fn parse_struct_fields(
&mut self, &mut self,
pth: ast::Path, pth: ast::Path,
recover: bool, recover: bool,
close_delim: token::DelimToken, close_delim: Delimiter,
) -> PResult<'a, (Vec<ExprField>, ast::StructRest, bool)> { ) -> PResult<'a, (Vec<ExprField>, ast::StructRest, bool)> {
let mut fields = Vec::new(); let mut fields = Vec::new();
let mut base = ast::StructRest::None; let mut base = ast::StructRest::None;
@ -2853,9 +2855,9 @@ pub(super) fn parse_struct_expr(
) -> PResult<'a, P<Expr>> { ) -> PResult<'a, P<Expr>> {
let lo = pth.span; let lo = pth.span;
let (fields, base, recover_async) = let (fields, base, recover_async) =
self.parse_struct_fields(pth.clone(), recover, token::Brace)?; self.parse_struct_fields(pth.clone(), recover, Delimiter::Brace)?;
let span = lo.to(self.token.span); let span = lo.to(self.token.span);
self.expect(&token::CloseDelim(token::Brace))?; self.expect(&token::CloseDelim(Delimiter::Brace))?;
let expr = if recover_async { let expr = if recover_async {
ExprKind::Err ExprKind::Err
} else { } else {


@ -4,7 +4,7 @@
use rustc_ast::ast::*; use rustc_ast::ast::*;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{self, TokenKind}; use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree}; use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID}; use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID};
use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind}; use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind};
@ -39,9 +39,9 @@ fn parse_item_mod(&mut self, attrs: &mut Vec<Attribute>) -> PResult<'a, ItemInfo
let mod_kind = if self.eat(&token::Semi) { let mod_kind = if self.eat(&token::Semi) {
ModKind::Unloaded ModKind::Unloaded
} else { } else {
self.expect(&token::OpenDelim(token::Brace))?; self.expect(&token::OpenDelim(Delimiter::Brace))?;
let (mut inner_attrs, items, inner_span) = let (mut inner_attrs, items, inner_span) =
self.parse_mod(&token::CloseDelim(token::Brace))?; self.parse_mod(&token::CloseDelim(Delimiter::Brace))?;
attrs.append(&mut inner_attrs); attrs.append(&mut inner_attrs);
ModKind::Loaded(items, Inline::Yes, inner_span) ModKind::Loaded(items, Inline::Yes, inner_span)
}; };
@ -324,7 +324,7 @@ fn recover_missing_kw_before_item(&mut self) -> PResult<'a, ()> {
let sp = self.prev_token.span.between(self.token.span); let sp = self.prev_token.span.between(self.token.span);
let full_sp = self.prev_token.span.to(self.token.span); let full_sp = self.prev_token.span.to(self.token.span);
let ident_sp = self.token.span; let ident_sp = self.token.span;
if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) { if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) {
// possible public struct definition where `struct` was forgotten // possible public struct definition where `struct` was forgotten
let ident = self.parse_ident().unwrap(); let ident = self.parse_ident().unwrap();
let msg = format!("add `struct` here to parse `{ident}` as a public struct"); let msg = format!("add `struct` here to parse `{ident}` as a public struct");
@ -336,16 +336,16 @@ fn recover_missing_kw_before_item(&mut self) -> PResult<'a, ()> {
Applicability::MaybeIncorrect, // speculative Applicability::MaybeIncorrect, // speculative
); );
Err(err) Err(err)
} else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) { } else if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Parenthesis)) {
let ident = self.parse_ident().unwrap(); let ident = self.parse_ident().unwrap();
self.bump(); // `(` self.bump(); // `(`
let kw_name = self.recover_first_param(); let kw_name = self.recover_first_param();
self.consume_block(token::Paren, ConsumeClosingDelim::Yes); self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes);
let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) { let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]); self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
self.bump(); // `{` self.bump(); // `{`
("fn", kw_name, false) ("fn", kw_name, false)
} else if self.check(&token::OpenDelim(token::Brace)) { } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.bump(); // `{` self.bump(); // `{`
("fn", kw_name, false) ("fn", kw_name, false)
} else if self.check(&token::Colon) { } else if self.check(&token::Colon) {
@ -358,7 +358,7 @@ fn recover_missing_kw_before_item(&mut self) -> PResult<'a, ()> {
let msg = format!("missing `{kw}` for {kw_name} definition"); let msg = format!("missing `{kw}` for {kw_name} definition");
let mut err = self.struct_span_err(sp, &msg); let mut err = self.struct_span_err(sp, &msg);
if !ambiguous { if !ambiguous {
self.consume_block(token::Brace, ConsumeClosingDelim::Yes); self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
let suggestion = let suggestion =
format!("add `{kw}` here to parse `{ident}` as a public {kw_name}"); format!("add `{kw}` here to parse `{ident}` as a public {kw_name}");
err.span_suggestion_short( err.span_suggestion_short(
@ -386,9 +386,9 @@ fn recover_missing_kw_before_item(&mut self) -> PResult<'a, ()> {
let ident = self.parse_ident().unwrap(); let ident = self.parse_ident().unwrap();
self.eat_to_tokens(&[&token::Gt]); self.eat_to_tokens(&[&token::Gt]);
self.bump(); // `>` self.bump(); // `>`
let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) { let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(Delimiter::Parenthesis)) {
("fn", self.recover_first_param(), false) ("fn", self.recover_first_param(), false)
} else if self.check(&token::OpenDelim(token::Brace)) { } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
("struct", "struct", false) ("struct", "struct", false)
} else { } else {
("fn` or `struct", "function or struct", true) ("fn` or `struct", "function or struct", true)
@ -630,11 +630,11 @@ fn parse_item_list<T>(
mut parse_item: impl FnMut(&mut Parser<'a>) -> PResult<'a, Option<Option<T>>>, mut parse_item: impl FnMut(&mut Parser<'a>) -> PResult<'a, Option<Option<T>>>,
) -> PResult<'a, Vec<T>> { ) -> PResult<'a, Vec<T>> {
let open_brace_span = self.token.span; let open_brace_span = self.token.span;
self.expect(&token::OpenDelim(token::Brace))?; self.expect(&token::OpenDelim(Delimiter::Brace))?;
attrs.append(&mut self.parse_inner_attributes()?); attrs.append(&mut self.parse_inner_attributes()?);
let mut items = Vec::new(); let mut items = Vec::new();
while !self.eat(&token::CloseDelim(token::Brace)) { while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
if self.recover_doc_comment_before_brace() { if self.recover_doc_comment_before_brace() {
continue; continue;
} }
@ -642,7 +642,7 @@ fn parse_item_list<T>(
Ok(None) => { Ok(None) => {
// We have to bail or we'll potentially never make progress. // We have to bail or we'll potentially never make progress.
let non_item_span = self.token.span; let non_item_span = self.token.span;
self.consume_block(token::Brace, ConsumeClosingDelim::Yes); self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
self.struct_span_err(non_item_span, "non-item in item list") self.struct_span_err(non_item_span, "non-item in item list")
.span_label(open_brace_span, "item list starts here") .span_label(open_brace_span, "item list starts here")
.span_label(non_item_span, "non-item starts here") .span_label(non_item_span, "non-item starts here")
@ -652,7 +652,7 @@ fn parse_item_list<T>(
} }
Ok(Some(item)) => items.extend(item), Ok(Some(item)) => items.extend(item),
Err(mut err) => { Err(mut err) => {
self.consume_block(token::Brace, ConsumeClosingDelim::Yes); self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
err.span_label(open_brace_span, "while parsing this item list starting here") err.span_label(open_brace_span, "while parsing this item list starting here")
.span_label(self.prev_token.span, "the item list ends here") .span_label(self.prev_token.span, "the item list ends here")
.emit(); .emit();
@ -666,7 +666,7 @@ fn parse_item_list<T>(
/// Recover on a doc comment before `}`. /// Recover on a doc comment before `}`.
fn recover_doc_comment_before_brace(&mut self) -> bool { fn recover_doc_comment_before_brace(&mut self) -> bool {
if let token::DocComment(..) = self.token.kind { if let token::DocComment(..) = self.token.kind {
if self.look_ahead(1, |tok| tok == &token::CloseDelim(token::Brace)) { if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) {
struct_span_err!( struct_span_err!(
self.diagnostic(), self.diagnostic(),
self.token.span, self.token.span,
@ -866,7 +866,7 @@ fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
let lo = self.token.span; let lo = self.token.span;
let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo(), tokens: None }; let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo(), tokens: None };
let kind = if self.check(&token::OpenDelim(token::Brace)) let kind = if self.check(&token::OpenDelim(Delimiter::Brace))
|| self.check(&token::BinOp(token::Star)) || self.check(&token::BinOp(token::Star))
|| self.is_import_coupler() || self.is_import_coupler()
{ {
@ -908,7 +908,7 @@ fn parse_use_tree_glob_or_nested(&mut self) -> PResult<'a, UseTreeKind> {
/// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`] /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
/// ``` /// ```
fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> { fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
self.parse_delim_comma_seq(token::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID))) self.parse_delim_comma_seq(Delimiter::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID)))
.map(|(r, _)| r) .map(|(r, _)| r)
} }
@ -1077,7 +1077,7 @@ fn is_unsafe_foreign_mod(&self) -> bool {
&& self.is_keyword_ahead(1, &[kw::Extern]) && self.is_keyword_ahead(1, &[kw::Extern])
&& self.look_ahead( && self.look_ahead(
2 + self.look_ahead(2, |t| t.can_begin_literal_maybe_minus() as usize), 2 + self.look_ahead(2, |t| t.can_begin_literal_maybe_minus() as usize),
|t| t.kind == token::OpenDelim(token::Brace), |t| t.kind == token::OpenDelim(Delimiter::Brace),
) )
} }
@ -1204,8 +1204,9 @@ fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
let mut generics = self.parse_generics()?; let mut generics = self.parse_generics()?;
generics.where_clause = self.parse_where_clause()?; generics.where_clause = self.parse_where_clause()?;
let (variants, _) = let (variants, _) = self
self.parse_delim_comma_seq(token::Brace, |p| p.parse_enum_variant()).map_err(|e| { .parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant())
.map_err(|e| {
self.recover_stmt(); self.recover_stmt();
e e
})?; })?;
@ -1228,11 +1229,11 @@ fn parse_enum_variant(&mut self) -> PResult<'a, Option<Variant>> {
} }
let ident = this.parse_field_ident("enum", vlo)?; let ident = this.parse_field_ident("enum", vlo)?;
let struct_def = if this.check(&token::OpenDelim(token::Brace)) { let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant. // Parse a struct variant.
let (fields, recovered) = this.parse_record_struct_body("struct", false)?; let (fields, recovered) = this.parse_record_struct_body("struct", false)?;
VariantData::Struct(fields, recovered) VariantData::Struct(fields, recovered)
} else if this.check(&token::OpenDelim(token::Paren)) { } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID) VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID)
} else { } else {
VariantData::Unit(DUMMY_NODE_ID) VariantData::Unit(DUMMY_NODE_ID)
@ -1292,12 +1293,12 @@ fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
} else if self.eat(&token::Semi) { } else if self.eat(&token::Semi) {
VariantData::Unit(DUMMY_NODE_ID) VariantData::Unit(DUMMY_NODE_ID)
// Record-style struct definition // Record-style struct definition
} else if self.token == token::OpenDelim(token::Brace) { } else if self.token == token::OpenDelim(Delimiter::Brace) {
let (fields, recovered) = let (fields, recovered) =
self.parse_record_struct_body("struct", generics.where_clause.has_where_token)?; self.parse_record_struct_body("struct", generics.where_clause.has_where_token)?;
VariantData::Struct(fields, recovered) VariantData::Struct(fields, recovered)
// Tuple-style struct definition with optional where-clause. // Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(token::Paren) { } else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID); let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
generics.where_clause = self.parse_where_clause()?; generics.where_clause = self.parse_where_clause()?;
self.expect_semi()?; self.expect_semi()?;
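
For orientation, the three struct bodies these branches distinguish, written as ordinary user code:

struct Unit;                  // `;` directly after the name (or after a where-clause)
struct Tuple(u32, u32);       // parenthesis-delimited body followed by `;`
struct Record { field: u32 }  // brace-delimited body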
@ -1326,7 +1327,7 @@ fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
let (fields, recovered) = let (fields, recovered) =
self.parse_record_struct_body("union", generics.where_clause.has_where_token)?; self.parse_record_struct_body("union", generics.where_clause.has_where_token)?;
VariantData::Struct(fields, recovered) VariantData::Struct(fields, recovered)
} else if self.token == token::OpenDelim(token::Brace) { } else if self.token == token::OpenDelim(Delimiter::Brace) {
let (fields, recovered) = let (fields, recovered) =
self.parse_record_struct_body("union", generics.where_clause.has_where_token)?; self.parse_record_struct_body("union", generics.where_clause.has_where_token)?;
VariantData::Struct(fields, recovered) VariantData::Struct(fields, recovered)
@ -1348,10 +1349,10 @@ fn parse_record_struct_body(
) -> PResult<'a, (Vec<FieldDef>, /* recovered */ bool)> { ) -> PResult<'a, (Vec<FieldDef>, /* recovered */ bool)> {
let mut fields = Vec::new(); let mut fields = Vec::new();
let mut recovered = false; let mut recovered = false;
if self.eat(&token::OpenDelim(token::Brace)) { if self.eat(&token::OpenDelim(Delimiter::Brace)) {
while self.token != token::CloseDelim(token::Brace) { while self.token != token::CloseDelim(Delimiter::Brace) {
let field = self.parse_field_def(adt_ty).map_err(|e| { let field = self.parse_field_def(adt_ty).map_err(|e| {
self.consume_block(token::Brace, ConsumeClosingDelim::No); self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
recovered = true; recovered = true;
e e
}); });
@ -1363,7 +1364,7 @@ fn parse_record_struct_body(
} }
} }
} }
self.eat(&token::CloseDelim(token::Brace)); self.eat(&token::CloseDelim(Delimiter::Brace));
} else { } else {
let token_str = super::token_descr(&self.token); let token_str = super::token_descr(&self.token);
let msg = &format!( let msg = &format!(
@ -1439,7 +1440,7 @@ fn parse_single_struct_field(
token::Comma => { token::Comma => {
self.bump(); self.bump();
} }
token::CloseDelim(token::Brace) => {} token::CloseDelim(Delimiter::Brace) => {}
token::DocComment(..) => { token::DocComment(..) => {
let previous_span = self.prev_token.span; let previous_span = self.prev_token.span;
let mut err = self.span_err(self.token.span, Error::UselessDocComment); let mut err = self.span_err(self.token.span, Error::UselessDocComment);
@ -1450,7 +1451,7 @@ fn parse_single_struct_field(
if !seen_comma && comma_after_doc_seen { if !seen_comma && comma_after_doc_seen {
seen_comma = true; seen_comma = true;
} }
if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) { if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) {
err.emit(); err.emit();
} else { } else {
if !seen_comma { if !seen_comma {
@ -1478,7 +1479,7 @@ fn parse_single_struct_field(
if let Some(last_segment) = segments.last() { if let Some(last_segment) = segments.last() {
recovered = self.check_trailing_angle_brackets( recovered = self.check_trailing_angle_brackets(
last_segment, last_segment,
&[&token::Comma, &token::CloseDelim(token::Brace)], &[&token::Comma, &token::CloseDelim(Delimiter::Brace)],
); );
if recovered { if recovered {
// Handle a case like `Vec<u8>>,` where we can continue parsing fields // Handle a case like `Vec<u8>>,` where we can continue parsing fields
@ -1636,12 +1637,12 @@ fn parse_field_ident(&mut self, adt_ty: &str, lo: Span) -> PResult<'a, Ident> {
/// ``` /// ```
fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> { fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
let ident = self.parse_ident()?; let ident = self.parse_ident()?;
let body = if self.check(&token::OpenDelim(token::Brace)) { let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.parse_mac_args()? // `MacBody` self.parse_mac_args()? // `MacBody`
} else if self.check(&token::OpenDelim(token::Paren)) { } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let params = self.parse_token_tree(); // `MacParams` let params = self.parse_token_tree(); // `MacParams`
let pspan = params.span(); let pspan = params.span();
if !self.check(&token::OpenDelim(token::Brace)) { if !self.check(&token::OpenDelim(Delimiter::Brace)) {
return self.unexpected(); return self.unexpected();
} }
let body = self.parse_token_tree(); // `MacBody` let body = self.parse_token_tree(); // `MacBody`
@ -1924,7 +1925,7 @@ fn parse_fn_body(
self.expect_semi()?; self.expect_semi()?;
*sig_hi = self.prev_token.span; *sig_hi = self.prev_token.span;
(Vec::new(), None) (Vec::new(), None)
} else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() { } else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
self.parse_inner_attrs_and_block().map(|(attrs, body)| (attrs, Some(body)))? self.parse_inner_attrs_and_block().map(|(attrs, body)| (attrs, Some(body)))?
} else if self.token.kind == token::Eq { } else if self.token.kind == token::Eq {
// Recover `fn foo() = $expr;`. // Recover `fn foo() = $expr;`.
@ -1943,12 +1944,12 @@ fn parse_fn_body(
(Vec::new(), Some(self.mk_block_err(span))) (Vec::new(), Some(self.mk_block_err(span)))
} else { } else {
let expected = if req_body { let expected = if req_body {
&[token::OpenDelim(token::Brace)][..] &[token::OpenDelim(Delimiter::Brace)][..]
} else { } else {
&[token::Semi, token::OpenDelim(token::Brace)] &[token::Semi, token::OpenDelim(Delimiter::Brace)]
}; };
if let Err(mut err) = self.expected_one_of_not_found(&[], &expected) { if let Err(mut err) = self.expected_one_of_not_found(&[], &expected) {
if self.token.kind == token::CloseDelim(token::Brace) { if self.token.kind == token::CloseDelim(Delimiter::Brace) {
// The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in // The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
// the AST for typechecking. // the AST for typechecking.
err.span_label(ident.span, "while parsing this `fn`"); err.span_label(ident.span, "while parsing this `fn`");
@ -2164,7 +2165,7 @@ fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, Vec<Param>> {
e.emit(); e.emit();
let lo = p.prev_token.span; let lo = p.prev_token.span;
// Skip every token until next possible arg or end. // Skip every token until next possible arg or end.
p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]); p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]);
// Create a placeholder argument for proper arg count (issue #34264). // Create a placeholder argument for proper arg count (issue #34264).
Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span)))) Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span))))
}); });
@ -2220,7 +2221,7 @@ fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResu
let mut ty = this.parse_ty_for_param(); let mut ty = this.parse_ty_for_param();
if ty.is_ok() if ty.is_ok()
&& this.token != token::Comma && this.token != token::Comma
&& this.token != token::CloseDelim(token::Paren) && this.token != token::CloseDelim(Delimiter::Parenthesis)
{ {
// This wasn't actually a type, but a pattern looking like a type, // This wasn't actually a type, but a pattern looking like a type,
// so we are going to rollback and re-parse for recovery. // so we are going to rollback and re-parse for recovery.


@ -19,7 +19,7 @@
pub use path::PathStyle; pub use path::PathStyle;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind}; use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::AttributesData; use rustc_ast::tokenstream::AttributesData;
use rustc_ast::tokenstream::{self, DelimSpan, Spacing}; use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::tokenstream::{TokenStream, TokenTree};
@ -244,12 +244,12 @@ struct TokenCursor {
#[derive(Clone)] #[derive(Clone)]
struct TokenCursorFrame { struct TokenCursorFrame {
delim_sp: Option<(DelimToken, DelimSpan)>, delim_sp: Option<(Delimiter, DelimSpan)>,
tree_cursor: tokenstream::Cursor, tree_cursor: tokenstream::Cursor,
} }
impl TokenCursorFrame { impl TokenCursorFrame {
fn new(delim_sp: Option<(DelimToken, DelimSpan)>, tts: TokenStream) -> Self { fn new(delim_sp: Option<(Delimiter, DelimSpan)>, tts: TokenStream) -> Self {
TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() } TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() }
} }
} }
@ -263,8 +263,8 @@ fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
#[inline(always)] #[inline(always)]
fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) { fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
loop { loop {
// FIXME: we currently don't return `NoDelim` open/close delims. To fix #67062 we will // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix #67062 we will
// need to, whereupon the `delim != DelimToken::NoDelim` conditions below can be // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
// removed. // removed.
if let Some((tree, spacing)) = self.frame.tree_cursor.next_with_spacing_ref() { if let Some((tree, spacing)) = self.frame.tree_cursor.next_with_spacing_ref() {
match tree { match tree {
@ -278,14 +278,14 @@ fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
// Set `open_delim` to true here because we deal with it immediately. // Set `open_delim` to true here because we deal with it immediately.
let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone()); let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone());
self.stack.push(mem::replace(&mut self.frame, frame)); self.stack.push(mem::replace(&mut self.frame, frame));
if delim != DelimToken::NoDelim { if delim != Delimiter::Invisible {
return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone); return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
} }
// No open delimiter to return; continue on to the next iteration. // No open delimiter to return; continue on to the next iteration.
} }
}; };
} else if let Some(frame) = self.stack.pop() { } else if let Some(frame) = self.stack.pop() {
if let Some((delim, span)) = self.frame.delim_sp && delim != DelimToken::NoDelim { if let Some((delim, span)) = self.frame.delim_sp && delim != Delimiter::Invisible {
self.frame = frame; self.frame = frame;
return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone); return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
} }
@ -314,7 +314,7 @@ fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> (Token
let delim_span = DelimSpan::from_single(span); let delim_span = DelimSpan::from_single(span);
let body = TokenTree::Delimited( let body = TokenTree::Delimited(
delim_span, delim_span,
token::Bracket, Delimiter::Bracket,
[ [
TokenTree::token(token::Ident(sym::doc, false), span), TokenTree::token(token::Ident(sym::doc, false), span),
TokenTree::token(token::Eq, span), TokenTree::token(token::Eq, span),
@ -626,7 +626,7 @@ fn check_inline_const(&self, dist: usize) -> bool {
self.is_keyword_ahead(dist, &[kw::Const]) self.is_keyword_ahead(dist, &[kw::Const])
&& self.look_ahead(dist + 1, |t| match t.kind { && self.look_ahead(dist + 1, |t| match t.kind {
token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)), token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
token::OpenDelim(DelimToken::Brace) => true, token::OpenDelim(Delimiter::Brace) => true,
_ => false, _ => false,
}) })
} }
@ -954,7 +954,7 @@ fn parse_unspanned_seq<T>(
fn parse_delim_comma_seq<T>( fn parse_delim_comma_seq<T>(
&mut self, &mut self,
delim: DelimToken, delim: Delimiter,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool)> { ) -> PResult<'a, (Vec<T>, bool)> {
self.parse_unspanned_seq( self.parse_unspanned_seq(
@ -969,7 +969,7 @@ fn parse_paren_comma_seq<T>(
&mut self, &mut self,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool)> { ) -> PResult<'a, (Vec<T>, bool)> {
self.parse_delim_comma_seq(token::Paren, f) self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
} }
/// Advance the parser by one token using provided token as the next one. /// Advance the parser by one token using provided token as the next one.
@ -1005,7 +1005,7 @@ pub fn bump(&mut self) {
} }
debug_assert!(!matches!( debug_assert!(!matches!(
next.0.kind, next.0.kind,
token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
)); ));
self.inlined_bump_with(next) self.inlined_bump_with(next)
} }
@ -1018,10 +1018,10 @@ pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R
} }
let frame = &self.token_cursor.frame; let frame = &self.token_cursor.frame;
if let Some((delim, span)) = frame.delim_sp && delim != DelimToken::NoDelim { if let Some((delim, span)) = frame.delim_sp && delim != Delimiter::Invisible {
let all_normal = (0..dist).all(|i| { let all_normal = (0..dist).all(|i| {
let token = frame.tree_cursor.look_ahead(i); let token = frame.tree_cursor.look_ahead(i);
!matches!(token, Some(TokenTree::Delimited(_, DelimToken::NoDelim, _))) !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
}); });
if all_normal { if all_normal {
return match frame.tree_cursor.look_ahead(dist - 1) { return match frame.tree_cursor.look_ahead(dist - 1) {
@ -1043,7 +1043,7 @@ pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R
token = cursor.next(/* desugar_doc_comments */ false).0; token = cursor.next(/* desugar_doc_comments */ false).0;
if matches!( if matches!(
token.kind, token.kind,
token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
) { ) {
continue; continue;
} }
@ -1079,7 +1079,7 @@ fn parse_unsafety(&mut self) -> Unsafe {
/// Parses constness: `const` or nothing. /// Parses constness: `const` or nothing.
fn parse_constness(&mut self) -> Const { fn parse_constness(&mut self) -> Const {
// Avoid const blocks to be parsed as const items // Avoid const blocks to be parsed as const items
if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace)) if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
&& self.eat_keyword(kw::Const) && self.eat_keyword(kw::Const)
{ {
Const::Yes(self.prev_token.uninterpolated_span()) Const::Yes(self.prev_token.uninterpolated_span())
@ -1142,9 +1142,9 @@ fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> { fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
Ok( Ok(
if self.check(&token::OpenDelim(DelimToken::Paren)) if self.check(&token::OpenDelim(Delimiter::Parenthesis))
|| self.check(&token::OpenDelim(DelimToken::Bracket)) || self.check(&token::OpenDelim(Delimiter::Bracket))
|| self.check(&token::OpenDelim(DelimToken::Brace)) || self.check(&token::OpenDelim(Delimiter::Brace))
{ {
match self.parse_token_tree() { match self.parse_token_tree() {
TokenTree::Delimited(dspan, delim, tokens) => TokenTree::Delimited(dspan, delim, tokens) =>
@ -1288,7 +1288,7 @@ pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibilit
} }
let lo = self.prev_token.span; let lo = self.prev_token.span;
if self.check(&token::OpenDelim(token::Paren)) { if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// We don't `self.bump()` the `(` yet because this might be a struct definition where // We don't `self.bump()` the `(` yet because this might be a struct definition where
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`. // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
@ -1299,7 +1299,7 @@ pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibilit
// Parse `pub(crate)`. // Parse `pub(crate)`.
self.bump(); // `(` self.bump(); // `(`
self.bump(); // `crate` self.bump(); // `crate`
self.expect(&token::CloseDelim(token::Paren))?; // `)` self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
let vis = VisibilityKind::Crate(CrateSugar::PubCrate); let vis = VisibilityKind::Crate(CrateSugar::PubCrate);
return Ok(Visibility { return Ok(Visibility {
span: lo.to(self.prev_token.span), span: lo.to(self.prev_token.span),
@ -1311,20 +1311,20 @@ pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibilit
self.bump(); // `(` self.bump(); // `(`
self.bump(); // `in` self.bump(); // `in`
let path = self.parse_path(PathStyle::Mod)?; // `path` let path = self.parse_path(PathStyle::Mod)?; // `path`
self.expect(&token::CloseDelim(token::Paren))?; // `)` self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID }; let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
return Ok(Visibility { return Ok(Visibility {
span: lo.to(self.prev_token.span), span: lo.to(self.prev_token.span),
kind: vis, kind: vis,
tokens: None, tokens: None,
}); });
} else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
&& self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower]) && self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower])
{ {
// Parse `pub(self)` or `pub(super)`. // Parse `pub(self)` or `pub(super)`.
self.bump(); // `(` self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?; // `super`/`self` let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
self.expect(&token::CloseDelim(token::Paren))?; // `)` self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID }; let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
return Ok(Visibility { return Ok(Visibility {
span: lo.to(self.prev_token.span), span: lo.to(self.prev_token.span),
@ -1346,7 +1346,7 @@ pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibilit
fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> { fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
self.bump(); // `(` self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?; let path = self.parse_path(PathStyle::Mod)?;
self.expect(&token::CloseDelim(token::Paren))?; // `)` self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
let msg = "incorrect visibility restriction"; let msg = "incorrect visibility restriction";
let suggestion = r##"some possible visibility restrictions are: let suggestion = r##"some possible visibility restrictions are:
@ -1413,7 +1413,7 @@ pub fn collect_tokens_no_attrs<R: AstLike>(
fn is_import_coupler(&mut self) -> bool { fn is_import_coupler(&mut self) -> bool {
self.check(&token::ModSep) self.check(&token::ModSep)
&& self.look_ahead(1, |t| { && self.look_ahead(1, |t| {
*t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star) *t == token::OpenDelim(Delimiter::Brace) || *t == token::BinOp(token::Star)
}) })
} }


@ -1,5 +1,5 @@
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{self, NonterminalKind, Token}; use rustc_ast::token::{self, Delimiter, NonterminalKind, Token};
use rustc_ast::AstLike; use rustc_ast::AstLike;
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_errors::PResult; use rustc_errors::PResult;
@ -43,7 +43,7 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool {
_ => token.can_begin_type(), _ => token.can_begin_type(),
}, },
NonterminalKind::Block => match token.kind { NonterminalKind::Block => match token.kind {
token::OpenDelim(token::Brace) => true, token::OpenDelim(Delimiter::Brace) => true,
token::Interpolated(ref nt) => !matches!( token::Interpolated(ref nt) => !matches!(
**nt, **nt,
token::NtItem(_) token::NtItem(_)
@ -67,8 +67,8 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool {
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => { NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => {
match token.kind { match token.kind {
token::Ident(..) | // box, ref, mut, and other identifiers (can stricten) token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
token::OpenDelim(token::Paren) | // tuple pattern token::OpenDelim(Delimiter::Parenthesis) | // tuple pattern
token::OpenDelim(token::Bracket) | // slice pattern token::OpenDelim(Delimiter::Bracket) | // slice pattern
token::BinOp(token::And) | // reference token::BinOp(token::And) | // reference
token::BinOp(token::Minus) | // negative literal token::BinOp(token::Minus) | // negative literal
token::AndAnd | // double reference token::AndAnd | // double reference


@ -2,7 +2,7 @@
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor}; use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token; use rustc_ast::token::{self, Delimiter};
use rustc_ast::{ use rustc_ast::{
self as ast, AttrVec, Attribute, BindingMode, Expr, ExprKind, MacCall, Mutability, Pat, self as ast, AttrVec, Attribute, BindingMode, Expr, ExprKind, MacCall, Mutability, Pat,
PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax, PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax,
@ -260,9 +260,9 @@ fn recover_trailing_vert(&mut self, lo: Option<Span>) -> bool {
| token::Semi // e.g. `let a |;`. | token::Semi // e.g. `let a |;`.
| token::Colon // e.g. `let a | :`. | token::Colon // e.g. `let a | :`.
| token::Comma // e.g. `let (a |,)`. | token::Comma // e.g. `let (a |,)`.
| token::CloseDelim(token::Bracket) // e.g. `let [a | ]`. | token::CloseDelim(Delimiter::Bracket) // e.g. `let [a | ]`.
| token::CloseDelim(token::Paren) // e.g. `let (a | )`. | token::CloseDelim(Delimiter::Parenthesis) // e.g. `let (a | )`.
| token::CloseDelim(token::Brace) // e.g. `let A { f: a | }`. | token::CloseDelim(Delimiter::Brace) // e.g. `let A { f: a | }`.
) )
}); });
match (is_end_ahead, &self.token.kind) { match (is_end_ahead, &self.token.kind) {
@ -323,11 +323,11 @@ fn parse_pat_with_range_pat(
let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd { let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd {
self.parse_pat_deref(expected)? self.parse_pat_deref(expected)?
} else if self.check(&token::OpenDelim(token::Paren)) { } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
self.parse_pat_tuple_or_parens()? self.parse_pat_tuple_or_parens()?
} else if self.check(&token::OpenDelim(token::Bracket)) { } else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
// Parse `[pat, pat,...]` as a slice pattern. // Parse `[pat, pat,...]` as a slice pattern.
let (pats, _) = self.parse_delim_comma_seq(token::Bracket, |p| { let (pats, _) = self.parse_delim_comma_seq(Delimiter::Bracket, |p| {
p.parse_pat_allow_top_alt( p.parse_pat_allow_top_alt(
None, None,
RecoverComma::No, RecoverComma::No,
@ -389,9 +389,9 @@ fn parse_pat_with_range_pat(
} else if let Some(form) = self.parse_range_end() { } else if let Some(form) = self.parse_range_end() {
let begin = self.mk_expr(span, ExprKind::Path(qself, path), AttrVec::new()); let begin = self.mk_expr(span, ExprKind::Path(qself, path), AttrVec::new());
self.parse_pat_range_begin_with(begin, form)? self.parse_pat_range_begin_with(begin, form)?
} else if self.check(&token::OpenDelim(token::Brace)) { } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.parse_pat_struct(qself, path)? self.parse_pat_struct(qself, path)?
} else if self.check(&token::OpenDelim(token::Paren)) { } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
self.parse_pat_tuple_struct(qself, path)? self.parse_pat_tuple_struct(qself, path)?
} else { } else {
PatKind::Path(qself, path) PatKind::Path(qself, path)
@ -845,8 +845,8 @@ fn can_be_ident_pat(&mut self) -> bool {
// Avoid `in`. Due to recovery in the list parser this messes with `for ( $pat in $expr )`. // Avoid `in`. Due to recovery in the list parser this messes with `for ( $pat in $expr )`.
&& !self.token.is_keyword(kw::In) && !self.token.is_keyword(kw::In)
// Try to do something more complex? // Try to do something more complex?
&& self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(token::Paren) // A tuple struct pattern. && self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(Delimiter::Parenthesis) // A tuple struct pattern.
| token::OpenDelim(token::Brace) // A struct pattern. | token::OpenDelim(Delimiter::Brace) // A struct pattern.
| token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern. | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
| token::ModSep // A tuple / struct variant pattern. | token::ModSep // A tuple / struct variant pattern.
| token::Not)) // A macro expanding to a pattern. | token::Not)) // A macro expanding to a pattern.
@ -868,7 +868,7 @@ fn parse_pat_ident(&mut self, binding_mode: BindingMode) -> PResult<'a, PatKind>
// This shortly leads to a parse error. Note that if there is no explicit // This shortly leads to a parse error. Note that if there is no explicit
// binding mode then we do not end up here, because the lookahead // binding mode then we do not end up here, because the lookahead
// will direct us over to `parse_enum_variant()`. // will direct us over to `parse_enum_variant()`.
if self.token == token::OpenDelim(token::Paren) { if self.token == token::OpenDelim(Delimiter::Parenthesis) {
return Err(self return Err(self
.struct_span_err(self.prev_token.span, "expected identifier, found enum pattern")); .struct_span_err(self.prev_token.span, "expected identifier, found enum pattern"));
} }
@ -917,7 +917,7 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<PatField>, bool)> {
let mut delayed_err: Option<DiagnosticBuilder<'a, ErrorGuaranteed>> = None; let mut delayed_err: Option<DiagnosticBuilder<'a, ErrorGuaranteed>> = None;
let mut etc_span = None; let mut etc_span = None;
while self.token != token::CloseDelim(token::Brace) { while self.token != token::CloseDelim(Delimiter::Brace) {
let attrs = match self.parse_outer_attributes() { let attrs = match self.parse_outer_attributes() {
Ok(attrs) => attrs, Ok(attrs) => attrs,
Err(err) => { Err(err) => {
@ -946,7 +946,7 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<PatField>, bool)> {
self.recover_one_fewer_dotdot(); self.recover_one_fewer_dotdot();
self.bump(); // `..` || `...` self.bump(); // `..` || `...`
if self.token == token::CloseDelim(token::Brace) { if self.token == token::CloseDelim(Delimiter::Brace) {
etc_span = Some(etc_sp); etc_span = Some(etc_sp);
break; break;
} }
@ -970,7 +970,7 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<PatField>, bool)> {
} }
etc_span = Some(etc_sp.until(self.token.span)); etc_span = Some(etc_sp.until(self.token.span));
if self.token == token::CloseDelim(token::Brace) { if self.token == token::CloseDelim(Delimiter::Brace) {
// If the struct looks otherwise well formed, recover and continue. // If the struct looks otherwise well formed, recover and continue.
if let Some(sp) = comma_sp { if let Some(sp) = comma_sp {
err.span_suggestion_short( err.span_suggestion_short(


@ -2,7 +2,7 @@
use super::{Parser, Restrictions, TokenType}; use super::{Parser, Restrictions, TokenType};
use crate::maybe_whole; use crate::maybe_whole;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token}; use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::{ use rustc_ast::{
self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocConstraint, self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocConstraint,
AssocConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs, AssocConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
@ -236,14 +236,14 @@ pub(super) fn parse_path_segment(
token.kind, token.kind,
token::Lt token::Lt
| token::BinOp(token::Shl) | token::BinOp(token::Shl)
| token::OpenDelim(token::Paren) | token::OpenDelim(Delimiter::Parenthesis)
| token::LArrow | token::LArrow
) )
}; };
let check_args_start = |this: &mut Self| { let check_args_start = |this: &mut Self| {
this.expected_tokens.extend_from_slice(&[ this.expected_tokens.extend_from_slice(&[
TokenType::Token(token::Lt), TokenType::Token(token::Lt),
TokenType::Token(token::OpenDelim(token::Paren)), TokenType::Token(token::OpenDelim(Delimiter::Parenthesis)),
]); ]);
is_args_start(&this.token) is_args_start(&this.token)
}; };
@ -639,7 +639,7 @@ pub(super) fn expr_is_valid_const_arg(&self, expr: &P<rustc_ast::Expr>) -> bool
/// the caller. /// the caller.
pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> { pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> {
// Parse const argument. // Parse const argument.
let value = if let token::OpenDelim(token::Brace) = self.token.kind { let value = if let token::OpenDelim(Delimiter::Brace) = self.token.kind {
self.parse_block_expr( self.parse_block_expr(
None, None,
self.token.span, self.token.span,
@ -667,7 +667,8 @@ pub(super) fn parse_generic_arg(
GenericArg::Const(self.parse_const_arg()?) GenericArg::Const(self.parse_const_arg()?)
} else if self.check_type() { } else if self.check_type() {
// Parse type argument. // Parse type argument.
let is_const_fn = self.look_ahead(1, |t| t.kind == token::OpenDelim(token::Paren)); let is_const_fn =
self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Parenthesis));
let mut snapshot = self.create_snapshot_for_diagnostic(); let mut snapshot = self.create_snapshot_for_diagnostic();
match self.parse_ty() { match self.parse_ty() {
Ok(ty) => GenericArg::Type(ty), Ok(ty) => GenericArg::Type(ty),


@ -11,7 +11,7 @@
use rustc_ast as ast; use rustc_ast as ast;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{self, TokenKind}; use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::util::classify; use rustc_ast::util::classify;
use rustc_ast::{ use rustc_ast::{
AstLike, AttrStyle, AttrVec, Attribute, LocalKind, MacCall, MacCallStmt, MacStmtStyle, AstLike, AttrStyle, AttrVec, Attribute, LocalKind, MacCall, MacCallStmt, MacStmtStyle,
@ -92,7 +92,7 @@ pub fn parse_stmt(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<
// Do not attempt to parse an expression if we're done here. // Do not attempt to parse an expression if we're done here.
self.error_outer_attrs(&attrs.take_for_recovery()); self.error_outer_attrs(&attrs.take_for_recovery());
self.mk_stmt(lo, StmtKind::Empty) self.mk_stmt(lo, StmtKind::Empty)
} else if self.token != token::CloseDelim(token::Brace) { } else if self.token != token::CloseDelim(Delimiter::Brace) {
// Remainder are line-expr stmts. // Remainder are line-expr stmts.
let e = if force_collect == ForceCollect::Yes { let e = if force_collect == ForceCollect::Yes {
self.collect_tokens_no_attrs(|this| { self.collect_tokens_no_attrs(|this| {
@ -131,7 +131,7 @@ fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a,
} }
} }
let expr = if this.eat(&token::OpenDelim(token::Brace)) { let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
this.parse_struct_expr(None, path, AttrVec::new(), true)? this.parse_struct_expr(None, path, AttrVec::new(), true)?
} else { } else {
let hi = this.prev_token.span; let hi = this.prev_token.span;
@ -165,7 +165,7 @@ fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResu
let hi = self.prev_token.span; let hi = self.prev_token.span;
let style = match delim { let style = match delim {
Some(token::Brace) => MacStmtStyle::Braces, Some(Delimiter::Brace) => MacStmtStyle::Braces,
Some(_) => MacStmtStyle::NoBraces, Some(_) => MacStmtStyle::NoBraces,
None => unreachable!(), None => unreachable!(),
}; };
@ -434,7 +434,7 @@ fn error_block_no_opening_brace_msg(
// If the next token is an open brace (e.g., `if a b {`), the place- // If the next token is an open brace (e.g., `if a b {`), the place-
// inside-a-block suggestion would be more likely wrong than right. // inside-a-block suggestion would be more likely wrong than right.
Ok(Some(_)) Ok(Some(_))
if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) if self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace))
|| do_not_suggest_help => {} || do_not_suggest_help => {}
// Do not suggest `if foo println!("") {;}` (as would be seen in test for #46836). // Do not suggest `if foo println!("") {;}` (as would be seen in test for #46836).
Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {} Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {}
@ -488,7 +488,7 @@ pub(super) fn parse_block_common(
maybe_whole!(self, NtBlock, |x| (Vec::new(), x)); maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
self.maybe_recover_unexpected_block_label(); self.maybe_recover_unexpected_block_label();
if !self.eat(&token::OpenDelim(token::Brace)) { if !self.eat(&token::OpenDelim(Delimiter::Brace)) {
return self.error_block_no_opening_brace(); return self.error_block_no_opening_brace();
} }
@ -509,7 +509,7 @@ pub(super) fn parse_block_common(
recover: AttemptLocalParseRecovery, recover: AttemptLocalParseRecovery,
) -> PResult<'a, P<Block>> { ) -> PResult<'a, P<Block>> {
let mut stmts = vec![]; let mut stmts = vec![];
while !self.eat(&token::CloseDelim(token::Brace)) { while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
if self.token == token::Eof { if self.token == token::Eof {
break; break;
} }
@ -553,7 +553,7 @@ pub fn parse_full_stmt(
{ {
// Just check for errors and recover; do not eat semicolon yet. // Just check for errors and recover; do not eat semicolon yet.
if let Err(mut e) = if let Err(mut e) =
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)]) self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)])
{ {
if let TokenKind::DocComment(..) = self.token.kind { if let TokenKind::DocComment(..) = self.token.kind {
if let Ok(snippet) = self.span_to_snippet(self.token.span) { if let Ok(snippet) = self.span_to_snippet(self.token.span) {


@ -3,7 +3,7 @@
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token, TokenKind}; use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::{ use rustc_ast::{
self as ast, BareFnTy, FnRetTy, GenericBound, GenericBounds, GenericParam, Generics, Lifetime, self as ast, BareFnTy, FnRetTy, GenericBound, GenericBounds, GenericParam, Generics, Lifetime,
MacCall, MutTy, Mutability, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax, Ty, TyKind, MacCall, MutTy, Mutability, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax, Ty, TyKind,
@ -249,14 +249,14 @@ fn parse_ty_common(
let lo = self.token.span; let lo = self.token.span;
let mut impl_dyn_multi = false; let mut impl_dyn_multi = false;
let kind = if self.check(&token::OpenDelim(token::Paren)) { let kind = if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
self.parse_ty_tuple_or_parens(lo, allow_plus)? self.parse_ty_tuple_or_parens(lo, allow_plus)?
} else if self.eat(&token::Not) { } else if self.eat(&token::Not) {
// Never type `!` // Never type `!`
TyKind::Never TyKind::Never
} else if self.eat(&token::BinOp(token::Star)) { } else if self.eat(&token::BinOp(token::Star)) {
self.parse_ty_ptr()? self.parse_ty_ptr()?
} else if self.eat(&token::OpenDelim(token::Bracket)) { } else if self.eat(&token::OpenDelim(Delimiter::Bracket)) {
self.parse_array_or_slice_ty()? self.parse_array_or_slice_ty()?
} else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) { } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
// Reference // Reference
@ -409,7 +409,7 @@ fn parse_array_or_slice_ty(&mut self) -> PResult<'a, TyKind> {
let elt_ty = match self.parse_ty() { let elt_ty = match self.parse_ty() {
Ok(ty) => ty, Ok(ty) => ty,
Err(mut err) Err(mut err)
if self.look_ahead(1, |t| t.kind == token::CloseDelim(token::Bracket)) if self.look_ahead(1, |t| t.kind == token::CloseDelim(Delimiter::Bracket))
| self.look_ahead(1, |t| t.kind == token::Semi) => | self.look_ahead(1, |t| t.kind == token::Semi) =>
{ {
// Recover from `[LIT; EXPR]` and `[LIT]` // Recover from `[LIT; EXPR]` and `[LIT]`
@ -422,14 +422,14 @@ fn parse_array_or_slice_ty(&mut self) -> PResult<'a, TyKind> {
let ty = if self.eat(&token::Semi) { let ty = if self.eat(&token::Semi) {
let mut length = self.parse_anon_const_expr()?; let mut length = self.parse_anon_const_expr()?;
if let Err(e) = self.expect(&token::CloseDelim(token::Bracket)) { if let Err(e) = self.expect(&token::CloseDelim(Delimiter::Bracket)) {
// Try to recover from `X<Y, ...>` when `X::<Y, ...>` works // Try to recover from `X<Y, ...>` when `X::<Y, ...>` works
self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?; self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?;
self.expect(&token::CloseDelim(token::Bracket))?; self.expect(&token::CloseDelim(Delimiter::Bracket))?;
} }
TyKind::Array(elt_ty, length) TyKind::Array(elt_ty, length)
} else { } else {
self.expect(&token::CloseDelim(token::Bracket))?; self.expect(&token::CloseDelim(Delimiter::Bracket))?;
TyKind::Slice(elt_ty) TyKind::Slice(elt_ty)
}; };
@ -492,9 +492,9 @@ fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
// Parses the `typeof(EXPR)`. // Parses the `typeof(EXPR)`.
// To avoid ambiguity, the type is surrounded by parentheses. // To avoid ambiguity, the type is surrounded by parentheses.
fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> { fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
self.expect(&token::OpenDelim(token::Paren))?; self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let expr = self.parse_anon_const_expr()?; let expr = self.parse_anon_const_expr()?;
self.expect(&token::CloseDelim(token::Paren))?; self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
Ok(TyKind::Typeof(expr)) Ok(TyKind::Typeof(expr))
} }
@ -672,7 +672,7 @@ fn can_begin_bound(&mut self) -> bool {
|| self.check(&token::Question) || self.check(&token::Question)
|| self.check(&token::Tilde) || self.check(&token::Tilde)
|| self.check_keyword(kw::For) || self.check_keyword(kw::For)
|| self.check(&token::OpenDelim(token::Paren)) || self.check(&token::OpenDelim(Delimiter::Parenthesis))
} }
fn error_negative_bounds( fn error_negative_bounds(
@ -713,7 +713,7 @@ fn error_negative_bounds(
fn parse_generic_bound(&mut self) -> PResult<'a, Result<GenericBound, Span>> { fn parse_generic_bound(&mut self) -> PResult<'a, Result<GenericBound, Span>> {
let anchor_lo = self.prev_token.span; let anchor_lo = self.prev_token.span;
let lo = self.token.span; let lo = self.token.span;
let has_parens = self.eat(&token::OpenDelim(token::Paren)); let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis));
let inner_lo = self.token.span; let inner_lo = self.token.span;
let is_negative = self.eat(&token::Not); let is_negative = self.eat(&token::Not);
@ -766,7 +766,7 @@ fn error_lt_bound_with_modifiers(&self, modifiers: BoundModifiers) {
/// Recover on `('lifetime)` with `(` already eaten. /// Recover on `('lifetime)` with `(` already eaten.
fn recover_paren_lifetime(&mut self, lo: Span, inner_lo: Span) -> PResult<'a, ()> { fn recover_paren_lifetime(&mut self, lo: Span, inner_lo: Span) -> PResult<'a, ()> {
let inner_span = inner_lo.to(self.prev_token.span); let inner_span = inner_lo.to(self.prev_token.span);
self.expect(&token::CloseDelim(token::Paren))?; self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
let mut err = self.struct_span_err( let mut err = self.struct_span_err(
lo.to(self.prev_token.span), lo.to(self.prev_token.span),
"parenthesized lifetime bounds are not supported", "parenthesized lifetime bounds are not supported",
@ -829,7 +829,7 @@ fn parse_generic_ty_bound(
// suggestion is given. // suggestion is given.
let bounds = vec![]; let bounds = vec![];
self.parse_remaining_bounds(bounds, true)?; self.parse_remaining_bounds(bounds, true)?;
self.expect(&token::CloseDelim(token::Paren))?; self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
let sp = vec![lo, self.prev_token.span]; let sp = vec![lo, self.prev_token.span];
let sugg: Vec<_> = sp.iter().map(|sp| (*sp, String::new())).collect(); let sugg: Vec<_> = sp.iter().map(|sp| (*sp, String::new())).collect();
self.struct_span_err(sp, "incorrect braces around trait bounds") self.struct_span_err(sp, "incorrect braces around trait bounds")
@ -840,7 +840,7 @@ fn parse_generic_ty_bound(
) )
.emit(); .emit();
} else { } else {
self.expect(&token::CloseDelim(token::Paren))?; self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
} }
} }


@ -1,6 +1,6 @@
use crate::parse::ParseSess; use crate::parse::ParseSess;
use crate::session::Session; use crate::session::Session;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token}; use rustc_ast::token::{self, Delimiter, Nonterminal, Token};
use rustc_ast::tokenstream::CanSynthesizeMissingTokens; use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree}; use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_data_structures::profiling::VerboseTimingGuard; use rustc_data_structures::profiling::VerboseTimingGuard;
@ -137,7 +137,7 @@ pub fn process_token(&mut self, token: Token) -> TokenStream {
let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens); let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
TokenTree::Delimited( TokenTree::Delimited(
DelimSpan::from_single(token.span), DelimSpan::from_single(token.span),
DelimToken::NoDelim, Delimiter::Invisible,
self.process_token_stream(tts), self.process_token_stream(tts),
) )
.into() .into()


@ -704,10 +704,10 @@ pub enum Delimiter {
#[stable(feature = "proc_macro_lib2", since = "1.29.0")] #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
Bracket, Bracket,
/// `Ø ... Ø` /// `Ø ... Ø`
/// An implicit delimiter, that may, for example, appear around tokens coming from a /// An invisible delimiter, that may, for example, appear around tokens coming from a
/// "macro variable" `$var`. It is important to preserve operator priorities in cases like /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
/// `$var * 3` where `$var` is `1 + 2`. /// `$var * 3` where `$var` is `1 + 2`.
/// Implicit delimiters might not survive roundtrip of a token stream through a string. /// Invisible delimiters might not survive roundtrip of a token stream through a string.
#[stable(feature = "proc_macro_lib2", since = "1.29.0")] #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
None, None,
} }
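To illustrate the doc comment above with a runnable example (ordinary `macro_rules!` code, not part of this diff): an `expr` fragment is substituted inside an invisible/`None` group, which is what keeps operator precedence intact.

// Illustrative only: `$e` expands to `1 + 2`, and the invisible group around the
// substituted fragment keeps the result `(1 + 2) * 3`, not `1 + 2 * 3`.
macro_rules! times_three {
    ($e:expr) => { $e * 3 };
}

fn main() {
    assert_eq!(times_three!(1 + 2), 9);
}
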


@ -1,4 +1,4 @@
use rustc_ast::token::{self, BinOpToken, DelimToken}; use rustc_ast::token::{self, BinOpToken, Delimiter};
use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast_pretty::pprust::state::State as Printer; use rustc_ast_pretty::pprust::state::State as Printer;
use rustc_ast_pretty::pprust::PrintState; use rustc_ast_pretty::pprust::PrintState;
@ -104,11 +104,11 @@ fn print_tt(printer: &mut Printer<'_>, tt: &TokenTree) {
let open_delim = printer.token_kind_to_string(&token::OpenDelim(*delim)); let open_delim = printer.token_kind_to_string(&token::OpenDelim(*delim));
printer.word(open_delim); printer.word(open_delim);
if !tts.is_empty() { if !tts.is_empty() {
if *delim == DelimToken::Brace { if *delim == Delimiter::Brace {
printer.space(); printer.space();
} }
print_tts(printer, tts); print_tts(printer, tts);
if *delim == DelimToken::Brace { if *delim == Delimiter::Brace {
printer.space(); printer.space();
} }
} }
@ -162,9 +162,9 @@ enum State {
(_, _) => (true, Other), (_, _) => (true, Other),
}, },
TokenTree::Delimited(_, delim, _) => match (state, delim) { TokenTree::Delimited(_, delim, _) => match (state, delim) {
(Dollar, DelimToken::Paren) => (false, DollarParen), (Dollar, Delimiter::Parenthesis) => (false, DollarParen),
(Pound | PoundBang, DelimToken::Bracket) => (false, Other), (Pound | PoundBang, Delimiter::Bracket) => (false, Other),
(Ident, DelimToken::Paren | DelimToken::Bracket) => (false, Other), (Ident, Delimiter::Parenthesis | Delimiter::Bracket) => (false, Other),
(_, _) => (true, Other), (_, _) => (true, Other),
}, },
}; };


@ -3,7 +3,7 @@
// compile-flags: -Z span-debug // compile-flags: -Z span-debug
// edition:2018 // edition:2018
// //
// Tests the pretty-printing behavior of inserting `NoDelim` groups // Tests the pretty-printing behavior of inserting `Invisible`-delimited groups
#![no_std] // Don't load unnecessary hygiene information from std #![no_std] // Don't load unnecessary hygiene information from std
extern crate std; extern crate std;


@ -2,7 +2,7 @@
use std::cmp::min; use std::cmp::min;
use itertools::Itertools; use itertools::Itertools;
use rustc_ast::token::{DelimToken, LitKind}; use rustc_ast::token::{Delimiter, LitKind};
use rustc_ast::{ast, ptr}; use rustc_ast::{ast, ptr};
use rustc_span::{BytePos, Span}; use rustc_span::{BytePos, Span};
@ -412,7 +412,7 @@ pub(crate) fn rewrite_array<'a, T: 'a + IntoOverflowableItem<'a>>(
context: &'a RewriteContext<'_>, context: &'a RewriteContext<'_>,
shape: Shape, shape: Shape,
force_separator_tactic: Option<SeparatorTactic>, force_separator_tactic: Option<SeparatorTactic>,
delim_token: Option<DelimToken>, delim_token: Option<Delimiter>,
) -> Option<String> { ) -> Option<String> {
overflow::rewrite_with_square_brackets( overflow::rewrite_with_square_brackets(
context, context,


@ -12,7 +12,7 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::panic::{catch_unwind, AssertUnwindSafe}; use std::panic::{catch_unwind, AssertUnwindSafe};
use rustc_ast::token::{BinOpToken, DelimToken, Token, TokenKind}; use rustc_ast::token::{BinOpToken, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{Cursor, Spacing, TokenStream, TokenTree}; use rustc_ast::tokenstream::{Cursor, Spacing, TokenStream, TokenTree};
use rustc_ast::{ast, ptr}; use rustc_ast::{ast, ptr};
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
@ -203,7 +203,7 @@ fn rewrite_macro_inner(
let is_forced_bracket = FORCED_BRACKET_MACROS.contains(&&macro_name[..]); let is_forced_bracket = FORCED_BRACKET_MACROS.contains(&&macro_name[..]);
let style = if is_forced_bracket && !is_nested_macro { let style = if is_forced_bracket && !is_nested_macro {
DelimToken::Bracket Delimiter::Bracket
} else { } else {
original_style original_style
}; };
@ -212,15 +212,15 @@ fn rewrite_macro_inner(
let has_comment = contains_comment(context.snippet(mac.span())); let has_comment = contains_comment(context.snippet(mac.span()));
if ts.is_empty() && !has_comment { if ts.is_empty() && !has_comment {
return match style { return match style {
DelimToken::Paren if position == MacroPosition::Item => { Delimiter::Parenthesis if position == MacroPosition::Item => {
Some(format!("{}();", macro_name)) Some(format!("{}();", macro_name))
} }
DelimToken::Bracket if position == MacroPosition::Item => { Delimiter::Bracket if position == MacroPosition::Item => {
Some(format!("{}[];", macro_name)) Some(format!("{}[];", macro_name))
} }
DelimToken::Paren => Some(format!("{}()", macro_name)), Delimiter::Parenthesis => Some(format!("{}()", macro_name)),
DelimToken::Bracket => Some(format!("{}[]", macro_name)), Delimiter::Bracket => Some(format!("{}[]", macro_name)),
DelimToken::Brace => Some(format!("{} {{}}", macro_name)), Delimiter::Brace => Some(format!("{} {{}}", macro_name)),
_ => unreachable!(), _ => unreachable!(),
}; };
} }
@ -260,7 +260,7 @@ fn rewrite_macro_inner(
} }
match style { match style {
DelimToken::Paren => { Delimiter::Parenthesis => {
// Handle special case: `vec!(expr; expr)` // Handle special case: `vec!(expr; expr)`
if vec_with_semi { if vec_with_semi {
handle_vec_semi(context, shape, arg_vec, macro_name, style) handle_vec_semi(context, shape, arg_vec, macro_name, style)
@ -286,7 +286,7 @@ fn rewrite_macro_inner(
}) })
} }
} }
DelimToken::Bracket => { Delimiter::Bracket => {
// Handle special case: `vec![expr; expr]` // Handle special case: `vec![expr; expr]`
if vec_with_semi { if vec_with_semi {
handle_vec_semi(context, shape, arg_vec, macro_name, style) handle_vec_semi(context, shape, arg_vec, macro_name, style)
@ -323,7 +323,7 @@ fn rewrite_macro_inner(
Some(format!("{}{}", rewrite, comma)) Some(format!("{}{}", rewrite, comma))
} }
} }
DelimToken::Brace => { Delimiter::Brace => {
// For macro invocations with braces, always put a space between // For macro invocations with braces, always put a space between
// the `macro_name!` and `{ /* macro_body */ }` but skip modifying // the `macro_name!` and `{ /* macro_body */ }` but skip modifying
// anything in between the braces (for now). // anything in between the braces (for now).
@ -342,11 +342,11 @@ fn handle_vec_semi(
shape: Shape, shape: Shape,
arg_vec: Vec<MacroArg>, arg_vec: Vec<MacroArg>,
macro_name: String, macro_name: String,
delim_token: DelimToken, delim_token: Delimiter,
) -> Option<String> { ) -> Option<String> {
let (left, right) = match delim_token { let (left, right) = match delim_token {
DelimToken::Paren => ("(", ")"), Delimiter::Parenthesis => ("(", ")"),
DelimToken::Bracket => ("[", "]"), Delimiter::Bracket => ("[", "]"),
_ => unreachable!(), _ => unreachable!(),
}; };
@ -528,7 +528,7 @@ enum MacroArgKind {
/// e.g., `$($foo: expr),*` /// e.g., `$($foo: expr),*`
Repeat( Repeat(
/// `()`, `[]` or `{}`. /// `()`, `[]` or `{}`.
DelimToken, Delimiter,
/// Inner arguments inside delimiters. /// Inner arguments inside delimiters.
Vec<ParsedMacroArg>, Vec<ParsedMacroArg>,
/// Something after the closing delimiter and the repeat token, if available. /// Something after the closing delimiter and the repeat token, if available.
@ -537,7 +537,7 @@ enum MacroArgKind {
Token, Token,
), ),
/// e.g., `[derive(Debug)]` /// e.g., `[derive(Debug)]`
Delimited(DelimToken, Vec<ParsedMacroArg>), Delimited(Delimiter, Vec<ParsedMacroArg>),
/// A possible separator. e.g., `,` or `;`. /// A possible separator. e.g., `,` or `;`.
Separator(String, String), Separator(String, String),
/// Other random stuff that does not fit to other kinds. /// Other random stuff that does not fit to other kinds.
@ -547,22 +547,22 @@ enum MacroArgKind {
fn delim_token_to_str( fn delim_token_to_str(
context: &RewriteContext<'_>, context: &RewriteContext<'_>,
delim_token: DelimToken, delim_token: Delimiter,
shape: Shape, shape: Shape,
use_multiple_lines: bool, use_multiple_lines: bool,
inner_is_empty: bool, inner_is_empty: bool,
) -> (String, String) { ) -> (String, String) {
let (lhs, rhs) = match delim_token { let (lhs, rhs) = match delim_token {
DelimToken::Paren => ("(", ")"), Delimiter::Parenthesis => ("(", ")"),
DelimToken::Bracket => ("[", "]"), Delimiter::Bracket => ("[", "]"),
DelimToken::Brace => { Delimiter::Brace => {
if inner_is_empty || use_multiple_lines { if inner_is_empty || use_multiple_lines {
("{", "}") ("{", "}")
} else { } else {
("{ ", " }") ("{ ", " }")
} }
} }
DelimToken::NoDelim => unreachable!(), Delimiter::Invisible => unreachable!(),
}; };
if use_multiple_lines { if use_multiple_lines {
let indent_str = shape.indent.to_string_with_newline(context.config); let indent_str = shape.indent.to_string_with_newline(context.config);
@ -583,8 +583,8 @@ impl MacroArgKind {
fn starts_with_brace(&self) -> bool { fn starts_with_brace(&self) -> bool {
matches!( matches!(
*self, *self,
MacroArgKind::Repeat(DelimToken::Brace, _, _, _) MacroArgKind::Repeat(Delimiter::Brace, _, _, _)
| MacroArgKind::Delimited(DelimToken::Brace, _) | MacroArgKind::Delimited(Delimiter::Brace, _)
) )
} }
@ -753,7 +753,7 @@ fn add_meta_variable(&mut self, iter: &mut Cursor) -> Option<()> {
} }
} }
fn add_delimited(&mut self, inner: Vec<ParsedMacroArg>, delim: DelimToken) { fn add_delimited(&mut self, inner: Vec<ParsedMacroArg>, delim: Delimiter) {
self.result.push(ParsedMacroArg { self.result.push(ParsedMacroArg {
kind: MacroArgKind::Delimited(delim, inner), kind: MacroArgKind::Delimited(delim, inner),
}); });
@ -763,7 +763,7 @@ fn add_delimited(&mut self, inner: Vec<ParsedMacroArg>, delim: DelimToken) {
fn add_repeat( fn add_repeat(
&mut self, &mut self,
inner: Vec<ParsedMacroArg>, inner: Vec<ParsedMacroArg>,
delim: DelimToken, delim: Delimiter,
iter: &mut Cursor, iter: &mut Cursor,
) -> Option<()> { ) -> Option<()> {
let mut buffer = String::new(); let mut buffer = String::new();
@ -1083,18 +1083,18 @@ pub(crate) fn convert_try_mac(
} }
} }
pub(crate) fn macro_style(mac: &ast::MacCall, context: &RewriteContext<'_>) -> DelimToken { pub(crate) fn macro_style(mac: &ast::MacCall, context: &RewriteContext<'_>) -> Delimiter {
let snippet = context.snippet(mac.span()); let snippet = context.snippet(mac.span());
let paren_pos = snippet.find_uncommented("(").unwrap_or(usize::max_value()); let paren_pos = snippet.find_uncommented("(").unwrap_or(usize::max_value());
let bracket_pos = snippet.find_uncommented("[").unwrap_or(usize::max_value()); let bracket_pos = snippet.find_uncommented("[").unwrap_or(usize::max_value());
let brace_pos = snippet.find_uncommented("{").unwrap_or(usize::max_value()); let brace_pos = snippet.find_uncommented("{").unwrap_or(usize::max_value());
if paren_pos < bracket_pos && paren_pos < brace_pos { if paren_pos < bracket_pos && paren_pos < brace_pos {
DelimToken::Paren Delimiter::Parenthesis
} else if bracket_pos < brace_pos { } else if bracket_pos < brace_pos {
DelimToken::Bracket Delimiter::Bracket
} else { } else {
DelimToken::Brace Delimiter::Brace
} }
} }
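For context, `macro_style` above keeps rustfmt's existing heuristic: whichever opening delimiter appears first in the macro call's source text (ignoring comments) decides the style. A standalone sketch of the same idea, using a plain `str::find` in place of rustfmt's comment-aware `find_uncommented`; the names here are illustrative, not rustfmt's:

// Hypothetical standalone version of the first-delimiter heuristic (illustrative only).
#[derive(Debug, PartialEq)]
enum Delim { Parenthesis, Bracket, Brace }

fn guess_macro_delim(snippet: &str) -> Delim {
    let pos = |c| snippet.find(c).unwrap_or(usize::MAX);
    let (paren, bracket, brace) = (pos('('), pos('['), pos('{'));
    if paren < bracket && paren < brace {
        Delim::Parenthesis
    } else if bracket < brace {
        Delim::Bracket
    } else {
        Delim::Brace
    }
}

fn main() {
    assert_eq!(guess_macro_delim("vec![1, 2, 3]"), Delim::Bracket);
    assert_eq!(guess_macro_delim("println!(\"hi\")"), Delim::Parenthesis);
    assert_eq!(guess_macro_delim("lazy_static! { }"), Delim::Brace);
}
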
@ -1174,7 +1174,7 @@ struct Macro {
// rather than clone them, if we can make the borrowing work out. // rather than clone them, if we can make the borrowing work out.
struct MacroBranch { struct MacroBranch {
span: Span, span: Span,
args_paren_kind: DelimToken, args_paren_kind: Delimiter,
args: TokenStream, args: TokenStream,
body: Span, body: Span,
whole_body: Span, whole_body: Span,
@ -1188,7 +1188,7 @@ fn rewrite(
multi_branch_style: bool, multi_branch_style: bool,
) -> Option<String> { ) -> Option<String> {
// Only attempt to format function-like macros. // Only attempt to format function-like macros.
if self.args_paren_kind != DelimToken::Paren { if self.args_paren_kind != Delimiter::Parenthesis {
// FIXME(#1539): implement for non-sugared macros. // FIXME(#1539): implement for non-sugared macros.
return None; return None;
} }
@ -1350,18 +1350,18 @@ fn rewrite_macro_with_items(
items: &[MacroArg], items: &[MacroArg],
macro_name: &str, macro_name: &str,
shape: Shape, shape: Shape,
style: DelimToken, style: Delimiter,
position: MacroPosition, position: MacroPosition,
span: Span, span: Span,
) -> Option<String> { ) -> Option<String> {
let (opener, closer) = match style { let (opener, closer) = match style {
DelimToken::Paren => ("(", ")"), Delimiter::Parenthesis => ("(", ")"),
DelimToken::Bracket => ("[", "]"), Delimiter::Bracket => ("[", "]"),
DelimToken::Brace => (" {", "}"), Delimiter::Brace => (" {", "}"),
_ => return None, _ => return None,
}; };
let trailing_semicolon = match style { let trailing_semicolon = match style {
DelimToken::Paren | DelimToken::Bracket if position == MacroPosition::Item => ";", Delimiter::Parenthesis | Delimiter::Bracket if position == MacroPosition::Item => ";",
_ => "", _ => "",
}; };
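The `trailing_semicolon` match above encodes a language rule worth spelling out: at item position, macro calls delimited by parentheses or brackets need a trailing `;`, while brace-delimited calls do not. A small example of that rule (ordinary Rust, illustrative only):

// Illustrative only: item-position macro invocations and their trailing semicolons.
macro_rules! make_fn {
    ($name:ident) => { fn $name() {} };
}

make_fn!(with_parens);   // `()`-style item invocation requires the `;`
make_fn! { with_braces } // `{}`-style item invocation takes no trailing `;`

fn main() {
    with_parens();
    with_braces();
}
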


@ -3,7 +3,7 @@
use std::cmp::min; use std::cmp::min;
use itertools::Itertools; use itertools::Itertools;
use rustc_ast::token::DelimToken; use rustc_ast::token::Delimiter;
use rustc_ast::{ast, ptr}; use rustc_ast::{ast, ptr};
use rustc_span::Span; use rustc_span::Span;
@ -297,11 +297,11 @@ pub(crate) fn rewrite_with_square_brackets<'a, T: 'a + IntoOverflowableItem<'a>>
shape: Shape, shape: Shape,
span: Span, span: Span,
force_separator_tactic: Option<SeparatorTactic>, force_separator_tactic: Option<SeparatorTactic>,
delim_token: Option<DelimToken>, delim_token: Option<Delimiter>,
) -> Option<String> { ) -> Option<String> {
let (lhs, rhs) = match delim_token { let (lhs, rhs) = match delim_token {
Some(DelimToken::Paren) => ("(", ")"), Some(Delimiter::Parenthesis) => ("(", ")"),
Some(DelimToken::Brace) => ("{", "}"), Some(Delimiter::Brace) => ("{", "}"),
_ => ("[", "]"), _ => ("[", "]"),
}; };
Context::new( Context::new(


@ -1,7 +1,7 @@
use std::panic::{catch_unwind, AssertUnwindSafe}; use std::panic::{catch_unwind, AssertUnwindSafe};
use rustc_ast::ast; use rustc_ast::ast;
use rustc_ast::token::{DelimToken, TokenKind}; use rustc_ast::token::{Delimiter, TokenKind};
use rustc_parse::parser::ForceCollect; use rustc_parse::parser::ForceCollect;
use rustc_span::symbol::kw; use rustc_span::symbol::kw;
@ -47,11 +47,11 @@ fn parse_cfg_if_inner<'a>(
.map_err(|_| "Failed to parse attributes")?; .map_err(|_| "Failed to parse attributes")?;
} }
if !parser.eat(&TokenKind::OpenDelim(DelimToken::Brace)) { if !parser.eat(&TokenKind::OpenDelim(Delimiter::Brace)) {
return Err("Expected an opening brace"); return Err("Expected an opening brace");
} }
while parser.token != TokenKind::CloseDelim(DelimToken::Brace) while parser.token != TokenKind::CloseDelim(Delimiter::Brace)
&& parser.token.kind != TokenKind::Eof && parser.token.kind != TokenKind::Eof
{ {
let item = match parser.parse_item(ForceCollect::No) { let item = match parser.parse_item(ForceCollect::No) {
@ -70,7 +70,7 @@ fn parse_cfg_if_inner<'a>(
} }
} }
if !parser.eat(&TokenKind::CloseDelim(DelimToken::Brace)) { if !parser.eat(&TokenKind::CloseDelim(Delimiter::Brace)) {
return Err("Expected a closing brace"); return Err("Expected a closing brace");
} }


@ -1,4 +1,4 @@
use rustc_ast::token::{DelimToken, TokenKind}; use rustc_ast::token::{Delimiter, TokenKind};
use rustc_ast::tokenstream::TokenStream; use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{ast, ptr}; use rustc_ast::{ast, ptr};
use rustc_parse::parser::{ForceCollect, Parser}; use rustc_parse::parser::{ForceCollect, Parser};
@ -81,7 +81,7 @@ fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
&& parser.look_ahead(1, |t| { && parser.look_ahead(1, |t| {
t.kind == TokenKind::Eof t.kind == TokenKind::Eof
|| t.kind == TokenKind::Comma || t.kind == TokenKind::Comma
|| t.kind == TokenKind::CloseDelim(DelimToken::NoDelim) || t.kind == TokenKind::CloseDelim(Delimiter::Invisible)
}) })
{ {
parser.bump(); parser.bump();
@ -97,7 +97,7 @@ fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
pub(crate) fn parse_macro_args( pub(crate) fn parse_macro_args(
context: &RewriteContext<'_>, context: &RewriteContext<'_>,
tokens: TokenStream, tokens: TokenStream,
style: DelimToken, style: Delimiter,
forced_bracket: bool, forced_bracket: bool,
) -> Option<ParsedMacroArgs> { ) -> Option<ParsedMacroArgs> {
let mut parser = build_parser(context, tokens); let mut parser = build_parser(context, tokens);
@ -105,7 +105,7 @@ pub(crate) fn parse_macro_args(
let mut vec_with_semi = false; let mut vec_with_semi = false;
let mut trailing_comma = false; let mut trailing_comma = false;
if DelimToken::Brace != style { if Delimiter::Brace != style {
loop { loop {
if let Some(arg) = check_keyword(&mut parser) { if let Some(arg) = check_keyword(&mut parser) {
args.push(arg); args.push(arg);


@ -1,7 +1,7 @@
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use std::rc::Rc; use std::rc::Rc;
use rustc_ast::{ast, token::DelimToken, visit, AstLike}; use rustc_ast::{ast, token::Delimiter, visit, AstLike};
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
use rustc_span::{symbol, BytePos, Pos, Span}; use rustc_span::{symbol, BytePos, Pos, Span};
@ -689,7 +689,7 @@ fn visit_mac(&mut self, mac: &ast::MacCall, ident: Option<symbol::Ident>, pos: M
// with whitespace between the delimiters and trailing semi (i.e. `foo!(abc) ;`) // with whitespace between the delimiters and trailing semi (i.e. `foo!(abc) ;`)
// are formatted correctly. // are formatted correctly.
let (span, rewrite) = match macro_style(mac, &self.get_context()) { let (span, rewrite) = match macro_style(mac, &self.get_context()) {
DelimToken::Bracket | DelimToken::Paren if MacroPosition::Item == pos => { Delimiter::Bracket | Delimiter::Parenthesis if MacroPosition::Item == pos => {
let search_span = mk_sp(mac.span().hi(), self.snippet_provider.end_pos()); let search_span = mk_sp(mac.span().hi(), self.snippet_provider.end_pos());
let hi = self.snippet_provider.span_before(search_span, ";"); let hi = self.snippet_provider.span_before(search_span, ";");
let target_span = mk_sp(mac.span().lo(), hi + BytePos(1)); let target_span = mk_sp(mac.span().lo(), hi + BytePos(1));