Auto merge of #122830 - matthiaskrgr:rollup-uk2by3f, r=matthiaskrgr

Rollup of 8 pull requests

Successful merges:

 - #122402 (Make `#[diagnostic::on_unimplemented]` format string parsing more robust)
 - #122644 (pattern analysis: add a custom test harness)
 - #122733 (Strip placeholders from hidden types before remapping generic parameter)
 - #122752 (Interpolated cleanups)
 - #122771 (add some comments to hir::ModuleItems)
 - #122793 (Implement macro-based deref!() syntax for deref patterns; see the sketch after this list)
 - #122810 (Remove `target_override`)
 - #122827 (Remove unnecessary braces from `bug`/`span_bug`)
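
For example, the new surface syntax from #122793 looks roughly like this. This is a minimal, illustrative sketch, assuming a nightly toolchain with the incomplete `deref_patterns` feature; it is not taken from this rollup's test suite:

```rust
// Illustrative sketch; requires the unstable `deref_patterns` feature.
#![feature(deref_patterns)]
#![allow(incomplete_features)]

fn main() {
    let v = vec![1, 2, 3];
    match v {
        // `deref!(pat)` matches `pat` against the dereferenced scrutinee,
        // here the `[i32]` slice behind the `Vec<i32>`.
        deref!([1, ..]) => println!("starts with 1"),
        _ => println!("something else"),
    }
}
```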

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2024-03-21 17:53:57 +00:00
commit a0569fa8f9
83 changed files with 1479 additions and 421 deletions

View file

@ -4440,6 +4440,8 @@ dependencies = [
"rustc_target",
"smallvec",
"tracing",
"tracing-subscriber",
"tracing-tree",
]
[[package]]

View file

@ -621,7 +621,9 @@ pub fn walk(&self, it: &mut impl FnMut(&Pat) -> bool) {
| PatKind::Or(s) => s.iter().for_each(|p| p.walk(it)),
// Trivial wrappers over inner patterns.
PatKind::Box(s) | PatKind::Ref(s, _) | PatKind::Paren(s) => s.walk(it),
PatKind::Box(s) | PatKind::Deref(s) | PatKind::Ref(s, _) | PatKind::Paren(s) => {
s.walk(it)
}
// These patterns do not contain subpatterns, skip.
PatKind::Wild
@ -792,6 +794,9 @@ pub enum PatKind {
/// A `box` pattern.
Box(P<Pat>),
/// A `deref` pattern (currently `deref!()` macro-based syntax).
Deref(P<Pat>),
/// A reference pattern (e.g., `&mut (a, b)`).
Ref(P<Pat>, Mutability),

View file

@ -1295,6 +1295,7 @@ pub fn noop_visit_pat<T: MutVisitor>(pat: &mut P<Pat>, vis: &mut T) {
fields.flat_map_in_place(|field| vis.flat_map_pat_field(field));
}
PatKind::Box(inner) => vis.visit_pat(inner),
PatKind::Deref(inner) => vis.visit_pat(inner),
PatKind::Ref(inner, _mutbl) => vis.visit_pat(inner),
PatKind::Range(e1, e2, Spanned { span: _, node: _ }) => {
visit_opt(e1, |e| vis.visit_expr(e));

View file

@ -105,7 +105,7 @@ pub fn is_semantic_float(&self) -> bool {
}
}
/// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
/// Keep this in sync with `Token::can_begin_literal_maybe_minus` excluding unary negation.
pub fn from_token(token: &Token) -> Option<Lit> {
match token.uninterpolate().kind {
Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
@ -664,7 +664,7 @@ pub fn is_ident_named(&self, name: Symbol) -> bool {
}
/// Returns `true` if the token is an interpolated path.
fn is_path(&self) -> bool {
fn is_whole_path(&self) -> bool {
if let Interpolated(nt) = &self.kind
&& let NtPath(..) = &nt.0
{
@ -710,7 +710,7 @@ pub fn is_qpath_start(&self) -> bool {
pub fn is_path_start(&self) -> bool {
self == &ModSep
|| self.is_qpath_start()
|| self.is_path()
|| self.is_whole_path()
|| self.is_path_segment_keyword()
|| self.is_ident() && !self.is_reserved_ident()
}

View file

@ -28,18 +28,7 @@
use std::borrow::Cow;
use std::{cmp, fmt, iter};
/// When the main Rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token tree. This is a very
/// loose structure, such that all sorts of different AST fragments can
/// be passed to syntax extensions using a uniform type.
///
/// If the syntax extension is an MBE macro, it will attempt to match its
/// LHS token tree against the provided token tree, and if it finds a
/// match, will transcribe the RHS token tree, splicing in any captured
/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds.
///
/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
/// Nothing special happens to misnamed or misplaced `SubstNt`s.
/// Part of a `TokenStream`.
#[derive(Debug, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub enum TokenTree {
/// A single token. Should never be `OpenDelim` or `CloseDelim`, because

View file

@ -576,7 +576,10 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) -> V::Res
try_visit!(visitor.visit_path(path, pattern.id));
walk_list!(visitor, visit_pat_field, fields);
}
PatKind::Box(subpattern) | PatKind::Ref(subpattern, _) | PatKind::Paren(subpattern) => {
PatKind::Box(subpattern)
| PatKind::Deref(subpattern)
| PatKind::Ref(subpattern, _)
| PatKind::Paren(subpattern) => {
try_visit!(visitor.visit_pat(subpattern));
}
PatKind::Ident(_, ident, optional_subpattern) => {

View file

@ -91,6 +91,9 @@ fn lower_pat_mut(&mut self, mut pattern: &Pat) -> hir::Pat<'hir> {
PatKind::Box(inner) => {
break hir::PatKind::Box(self.lower_pat(inner));
}
PatKind::Deref(inner) => {
break hir::PatKind::Deref(self.lower_pat(inner));
}
PatKind::Ref(inner, mutbl) => {
break hir::PatKind::Ref(self.lower_pat(inner), *mutbl);
}

View file

@ -413,10 +413,7 @@ fn visit_pat(&mut self, pattern: &'a ast::Pat) {
}
}
PatKind::Box(..) => {
if !self.features.deref_patterns {
// Allow box patterns under `deref_patterns`.
gate!(&self, box_patterns, pattern.span, "box pattern syntax is experimental");
}
gate!(&self, box_patterns, pattern.span, "box pattern syntax is experimental");
}
PatKind::Range(_, Some(_), Spanned { node: RangeEnd::Excluded, .. }) => {
gate!(
@ -610,10 +607,7 @@ macro_rules! gate_all_legacy_dont_use {
};
}
if !visitor.features.deref_patterns {
// Allow box patterns under `deref_patterns`.
gate_all_legacy_dont_use!(box_patterns, "box pattern syntax is experimental");
}
gate_all_legacy_dont_use!(box_patterns, "box pattern syntax is experimental");
gate_all_legacy_dont_use!(trait_alias, "trait aliases are experimental");
// Despite being a new feature, `where T: Trait<Assoc(): Sized>`, which is RTN syntax now,
// used to be gated under associated_type_bounds, which are right above, so RTN needs to

View file

@ -1626,6 +1626,12 @@ fn print_pat(&mut self, pat: &ast::Pat) {
self.word("box ");
self.print_pat(inner);
}
PatKind::Deref(inner) => {
self.word("deref!");
self.popen();
self.print_pat(inner);
self.pclose();
}
PatKind::Ref(inner, mutbl) => {
self.word("&");
if mutbl.is_mut() {

View file

@ -192,6 +192,11 @@ pub(crate) fn infer_opaque_types(
.find(|ur_vid| self.eval_equal(vid, **ur_vid))
.and_then(|ur_vid| self.definitions[*ur_vid].external_name)
.unwrap_or(infcx.tcx.lifetimes.re_erased),
ty::RePlaceholder(_) => ty::Region::new_error_with_message(
infcx.tcx,
concrete_type.span,
"hidden type contains placeholders, we don't support higher kinded opaques yet",
),
_ => region,
});
debug!(?universal_concrete_type);

View file

@ -21,7 +21,6 @@
};
use rustc_span::symbol::Symbol;
use rustc_target::abi::call::FnAbi;
use rustc_target::spec::Target;
use std::fmt;
@ -70,12 +69,6 @@ fn target_features(&self, _sess: &Session, _allow_unstable: bool) -> Vec<Symbol>
fn print_passes(&self) {}
fn print_version(&self) {}
/// If this plugin provides additional builtin targets, provide the one enabled by the options here.
/// Be careful: this is called *before* init() is called.
fn target_override(&self, _opts: &config::Options) -> Option<Target> {
None
}
/// The metadata loader used to load rlib and dylib metadata.
///
/// Alternative codegen backends may want to use different rlib or dylib formats than the

View file

@ -890,7 +890,7 @@ pub fn version_at_macro_invocation(
let backend_name = debug_flags.iter().find_map(|x| x.strip_prefix("codegen-backend="));
let opts = config::Options::default();
let sysroot = filesearch::materialize_sysroot(opts.maybe_sysroot.clone());
let target = config::build_target_config(early_dcx, &opts, None, &sysroot);
let target = config::build_target_config(early_dcx, &opts, &sysroot);
get_codegen_backend(early_dcx, &sysroot, backend_name, &target).print_version();
}
@ -1100,7 +1100,7 @@ pub fn describe_flag_categories(early_dcx: &EarlyDiagCtxt, matches: &Matches) ->
let opts = config::Options::default();
let sysroot = filesearch::materialize_sysroot(opts.maybe_sysroot.clone());
let target = config::build_target_config(early_dcx, &opts, None, &sysroot);
let target = config::build_target_config(early_dcx, &opts, &sysroot);
get_codegen_backend(early_dcx, &sysroot, backend_name, &target).print_passes();
return true;

View file

@ -392,12 +392,7 @@ pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize {
#[derive(Debug, Clone)]
pub(crate) enum NamedMatch {
MatchedSeq(Vec<NamedMatch>),
// A metavar match of type `tt`.
MatchedTokenTree(rustc_ast::tokenstream::TokenTree),
// A metavar match of any type other than `tt`.
MatchedNonterminal(Lrc<(Nonterminal, rustc_span::Span)>),
MatchedSingle(ParseNtResult<Lrc<(Nonterminal, Span)>>),
}
/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
@ -691,11 +686,11 @@ pub(super) fn parse_tt<'matcher, T: Tracker<'matcher>>(
}
Ok(nt) => nt,
};
let m = match nt {
ParseNtResult::Nt(nt) => MatchedNonterminal(Lrc::new((nt, span))),
ParseNtResult::Tt(tt) => MatchedTokenTree(tt),
};
mp.push_match(next_metavar, seq_depth, m);
mp.push_match(
next_metavar,
seq_depth,
MatchedSingle(nt.map_nt(|nt| (Lrc::new((nt, span))))),
);
mp.idx += 1;
} else {
unreachable!()

View file

@ -5,7 +5,7 @@
use crate::mbe::diagnostics::{annotate_doc_comment, parse_failure_msg};
use crate::mbe::macro_check;
use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser};
use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
use crate::mbe::macro_parser::{MatcherLoc, NamedMatch::*};
use crate::mbe::transcribe::transcribe;
use ast::token::IdentIsRaw;
@ -22,7 +22,7 @@
RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
};
use rustc_lint_defs::BuiltinLintDiag;
use rustc_parse::parser::{Parser, Recovery};
use rustc_parse::parser::{ParseNtResult, Parser, Recovery};
use rustc_session::parse::ParseSess;
use rustc_session::Session;
use rustc_span::edition::Edition;
@ -479,7 +479,7 @@ pub fn compile_declarative_macro(
MatchedSeq(s) => s
.iter()
.map(|m| {
if let MatchedTokenTree(tt) = m {
if let MatchedSingle(ParseNtResult::Tt(tt)) = m {
let tt = mbe::quoted::parse(
&TokenStream::new(vec![tt.clone()]),
true,
@ -505,7 +505,7 @@ pub fn compile_declarative_macro(
MatchedSeq(s) => s
.iter()
.map(|m| {
if let MatchedTokenTree(tt) = m {
if let MatchedSingle(ParseNtResult::Tt(tt)) = m {
return mbe::quoted::parse(
&TokenStream::new(vec![tt.clone()]),
false,

View file

@ -3,14 +3,14 @@
CountRepetitionMisplaced, MetaVarExprUnrecognizedVar, MetaVarsDifSeqMatchers, MustRepeatOnce,
NoSyntaxVarsExprRepeat, VarStillRepeating,
};
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, MatchedTokenTree, NamedMatch};
use crate::mbe::macro_parser::{NamedMatch, NamedMatch::*};
use crate::mbe::{self, KleeneOp, MetaVarExpr};
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::Diag;
use rustc_errors::{pluralize, PResult};
use rustc_errors::{pluralize, Diag, PResult};
use rustc_parse::parser::ParseNtResult;
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent};
use rustc_span::{with_metavar_spans, Span, SyntaxContext};
@ -250,26 +250,25 @@ pub(super) fn transcribe<'a>(
// the meta-var.
let ident = MacroRulesNormalizedIdent::new(original_ident);
if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
match cur_matched {
MatchedTokenTree(tt) => {
let tt = match cur_matched {
MatchedSingle(ParseNtResult::Tt(tt)) => {
// `tt`s are emitted into the output stream directly as "raw tokens",
// without wrapping them into groups.
let tt = maybe_use_metavar_location(cx, &stack, sp, tt, &mut marker);
result.push(tt);
maybe_use_metavar_location(cx, &stack, sp, tt, &mut marker)
}
MatchedNonterminal(nt) => {
MatchedSingle(ParseNtResult::Nt(nt)) => {
// Other variables are emitted into the output stream as groups with
// `Delimiter::Invisible` to maintain parsing priorities.
// `Interpolated` is currently used for such groups in rustc parser.
marker.visit_span(&mut sp);
result
.push(TokenTree::token_alone(token::Interpolated(nt.clone()), sp));
TokenTree::token_alone(token::Interpolated(nt.clone()), sp)
}
MatchedSeq(..) => {
// We were unable to descend far enough. This is an error.
return Err(cx.dcx().create_err(VarStillRepeating { span: sp, ident }));
}
}
};
result.push(tt)
} else {
// If we aren't able to match the meta-var, we push it back into the result but
// with modified syntax context. (I believe this supports nested macros).
@ -424,7 +423,7 @@ fn lookup_cur_matched<'a>(
interpolations.get(&ident).map(|mut matched| {
for &(idx, _) in repeats {
match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => break,
MatchedSingle(_) => break,
MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
}
}
@ -514,7 +513,7 @@ fn lockstep_iter_size(
let name = MacroRulesNormalizedIdent::new(*name);
match lookup_cur_matched(name, interpolations, repeats) {
Some(matched) => match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
MatchedSingle(_) => LockstepIterSize::Unconstrained,
MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
},
_ => LockstepIterSize::Unconstrained,
@ -557,7 +556,7 @@ fn count_repetitions<'a>(
// (or at the top-level of `matched` if no depth is given).
fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => Ok(1),
MatchedSingle(_) => Ok(1),
MatchedSeq(named_matches) => {
if depth_curr == depth_max {
Ok(named_matches.len())
@ -571,7 +570,7 @@ fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResu
/// Maximum depth
fn depth(counter: usize, matched: &NamedMatch) -> usize {
match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => counter,
MatchedSingle(_) => counter,
MatchedSeq(named_matches) => {
let rslt = counter + 1;
if let Some(elem) = named_matches.first() { depth(rslt, elem) } else { rslt }
@ -599,7 +598,7 @@ fn depth(counter: usize, matched: &NamedMatch) -> usize {
}
}
if let MatchedTokenTree(_) | MatchedNonterminal(_) = matched {
if let MatchedSingle(_) = matched {
return Err(cx.dcx().create_err(CountRepetitionMisplaced { span: sp.entire() }));
}

View file

@ -1015,7 +1015,7 @@ fn walk_short_(&self, it: &mut impl FnMut(&Pat<'hir>) -> bool) -> bool {
use PatKind::*;
match self.kind {
Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) | Err(_) => true,
Box(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_short_(it),
Box(s) | Deref(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_short_(it),
Struct(_, fields, _) => fields.iter().all(|field| field.pat.walk_short_(it)),
TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().all(|p| p.walk_short_(it)),
Slice(before, slice, after) => {
@ -1042,7 +1042,7 @@ fn walk_(&self, it: &mut impl FnMut(&Pat<'hir>) -> bool) {
use PatKind::*;
match self.kind {
Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) | Err(_) => {}
Box(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_(it),
Box(s) | Deref(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_(it),
Struct(_, fields, _) => fields.iter().for_each(|field| field.pat.walk_(it)),
TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().for_each(|p| p.walk_(it)),
Slice(before, slice, after) => {
@ -1185,6 +1185,9 @@ pub enum PatKind<'hir> {
/// A `box` pattern.
Box(&'hir Pat<'hir>),
/// A `deref` pattern (currently `deref!()` macro-based syntax).
Deref(&'hir Pat<'hir>),
/// A reference pattern (e.g., `&mut (a, b)`).
Ref(&'hir Pat<'hir>, Mutability),

View file

@ -660,7 +660,9 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat<'v>) -> V:
PatKind::Tuple(tuple_elements, _) => {
walk_list!(visitor, visit_pat, tuple_elements);
}
PatKind::Box(ref subpattern) | PatKind::Ref(ref subpattern, _) => {
PatKind::Box(ref subpattern)
| PatKind::Deref(ref subpattern)
| PatKind::Ref(ref subpattern, _) => {
try_visit!(visitor.visit_pat(subpattern));
}
PatKind::Binding(_, _hir_id, ident, ref optional_subpattern) => {

View file

@ -668,7 +668,7 @@ fn is_binding_pat(pat: &hir::Pat<'_>) -> bool {
| PatKind::TupleStruct(_, subpats, _)
| PatKind::Tuple(subpats, _) => subpats.iter().any(|p| is_binding_pat(p)),
PatKind::Box(subpat) => is_binding_pat(subpat),
PatKind::Box(subpat) | PatKind::Deref(subpat) => is_binding_pat(subpat),
PatKind::Ref(_, _)
| PatKind::Binding(hir::BindingAnnotation(hir::ByRef::No, _), ..)

View file

@ -1808,6 +1808,12 @@ fn print_pat(&mut self, pat: &hir::Pat<'_>) {
self.pclose();
}
}
PatKind::Deref(inner) => {
self.word("deref!");
self.popen();
self.print_pat(inner);
self.pclose();
}
PatKind::Ref(inner, mutbl) => {
let is_range_inner = matches!(inner.kind, PatKind::Range(..));
self.word("&");

View file

@ -463,6 +463,7 @@ fn maybe_read_scrutinee<'t>(
}
PatKind::Or(_)
| PatKind::Box(_)
| PatKind::Deref(_)
| PatKind::Ref(..)
| PatKind::Wild
| PatKind::Err(_) => {

View file

@ -719,7 +719,7 @@ fn cat_pattern_<F>(
self.cat_pattern_(place_with_id, subpat, op)?;
}
PatKind::Box(subpat) | PatKind::Ref(subpat, _) => {
PatKind::Box(subpat) | PatKind::Ref(subpat, _) | PatKind::Deref(subpat) => {
// box p1, &p1, &mut p1. we can ignore the mutability of
// PatKind::Ref since that information is already contained
// in the type.

View file

@ -210,10 +210,8 @@ fn check_pat(&self, pat: &'tcx Pat<'tcx>, expected: Ty<'tcx>, pat_info: PatInfo<
PatKind::Tuple(elements, ddpos) => {
self.check_pat_tuple(pat.span, elements, ddpos, expected, pat_info)
}
PatKind::Box(inner) if self.tcx.features().deref_patterns => {
self.check_pat_deref(pat.span, inner, expected, pat_info)
}
PatKind::Box(inner) => self.check_pat_box(pat.span, inner, expected, pat_info),
PatKind::Deref(inner) => self.check_pat_deref(pat.span, inner, expected, pat_info),
PatKind::Ref(inner, mutbl) => self.check_pat_ref(pat, inner, mutbl, expected, pat_info),
PatKind::Slice(before, slice, after) => {
self.check_pat_slice(pat.span, before, slice, after, expected, pat_info)
@ -297,6 +295,7 @@ fn calc_adjust_mode(&self, pat: &'tcx Pat<'tcx>, opt_path_res: Option<Res>) -> A
| PatKind::TupleStruct(..)
| PatKind::Tuple(..)
| PatKind::Box(_)
| PatKind::Deref(_)
| PatKind::Range(..)
| PatKind::Slice(..) => AdjustMode::Peel,
// A never pattern behaves somewhat like a literal or unit variant.
@ -762,6 +761,7 @@ fn borrow_pat_suggestion(&self, err: &mut Diag<'_>, pat: &Pat<'_>) {
| PatKind::Binding(..)
| PatKind::Path(..)
| PatKind::Box(..)
| PatKind::Deref(_)
| PatKind::Ref(..)
| PatKind::Lit(..)
| PatKind::Range(..)

View file

@ -148,7 +148,7 @@ pub fn check_dirty_clean_annotations(tcx: TyCtxt<'_>) {
let crate_items = tcx.hir_crate_items(());
for id in crate_items.items() {
for id in crate_items.free_items() {
dirty_clean_visitor.check_item(id.owner_id.def_id);
}

View file

@ -341,51 +341,22 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
let sysroot = filesearch::materialize_sysroot(config.opts.maybe_sysroot.clone());
let (codegen_backend, target_override) = match config.make_codegen_backend {
None => {
// Build a target without override, so that it can override the backend if needed
let target =
config::build_target_config(&early_dcx, &config.opts, None, &sysroot);
let target = config::build_target_config(&early_dcx, &config.opts, &sysroot);
let backend = util::get_codegen_backend(
&early_dcx,
&sysroot,
config.opts.unstable_opts.codegen_backend.as_deref(),
&target,
);
// target_override is documented to be called before init(), so this is okay
let target_override = backend.target_override(&config.opts);
// Assert that we don't use target's override of the backend and
// backend's override of the target at the same time
if config.opts.unstable_opts.codegen_backend.is_none()
&& target.default_codegen_backend.is_some()
&& target_override.is_some()
{
rustc_middle::bug!(
"Codegen backend requested target override even though the target requested the backend"
);
}
(backend, target_override)
}
let codegen_backend = match config.make_codegen_backend {
None => util::get_codegen_backend(
&early_dcx,
&sysroot,
config.opts.unstable_opts.codegen_backend.as_deref(),
&target,
),
Some(make_codegen_backend) => {
// N.B. `make_codegen_backend` takes precedence over `target.default_codegen_backend`,
// which is ignored in this case.
let backend = make_codegen_backend(&config.opts);
// target_override is documented to be called before init(), so this is okay
let target_override = backend.target_override(&config.opts);
(backend, target_override)
// N.B. `make_codegen_backend` takes precedence over
// `target.default_codegen_backend`, which is ignored in this case.
make_codegen_backend(&config.opts)
}
};
// Re-build target with the (potential) override
let target_cfg =
config::build_target_config(&early_dcx, &config.opts, target_override, &sysroot);
let temps_dir = config.opts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
let bundle = match rustc_errors::fluent_bundle(
@ -418,7 +389,7 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
locale_resources,
config.lint_caps,
config.file_loader,
target_cfg,
target,
sysroot,
util::rustc_version_str().unwrap_or("unknown"),
config.ice_file,

View file

@ -41,8 +41,7 @@ fn mk_session(matches: getopts::Matches) -> (Session, Cfg) {
let sysroot = filesearch::materialize_sysroot(sessopts.maybe_sysroot.clone());
let target_cfg =
rustc_session::config::build_target_config(&early_dcx, &sessopts, None, &sysroot);
let target = rustc_session::config::build_target_config(&early_dcx, &sessopts, &sysroot);
let sess = build_session(
early_dcx,
@ -53,7 +52,7 @@ fn mk_session(matches: getopts::Matches) -> (Session, Cfg) {
vec![],
Default::default(),
None,
target_cfg,
target,
sysroot,
"",
None,

View file

@ -1183,7 +1183,7 @@ fn check_pat(&mut self, cx: &EarlyContext<'_>, p: &ast::Pat) {
self.check_unused_parens_pat(cx, &f.pat, false, false, keep_space);
},
// Avoid linting on `i @ (p0 | .. | pn)` and `box (p0 | .. | pn)`, #64106.
Ident(.., Some(p)) | Box(p) => self.check_unused_parens_pat(cx, p, true, false, keep_space),
Ident(.., Some(p)) | Box(p) | Deref(p) => self.check_unused_parens_pat(cx, p, true, false, keep_space),
// Avoid linting on `&(mut x)` as `&mut x` has a different meaning, #55342.
// Also avoid linting on `& mut? (p0 | .. | pn)`, #64106.
Ref(p, m) => self.check_unused_parens_pat(cx, p, true, *m == Mutability::Not, keep_space),

View file

@ -166,12 +166,12 @@ pub fn root_module(self) -> &'hir Mod<'hir> {
#[inline]
pub fn items(self) -> impl Iterator<Item = ItemId> + 'hir {
self.tcx.hir_crate_items(()).items.iter().copied()
self.tcx.hir_crate_items(()).free_items.iter().copied()
}
#[inline]
pub fn module_items(self, module: LocalModDefId) -> impl Iterator<Item = ItemId> + 'hir {
self.tcx.hir_module_items(module).items()
self.tcx.hir_module_items(module).free_items()
}
pub fn def_key(self, def_id: LocalDefId) -> DefKey {
@ -418,7 +418,7 @@ pub fn visit_all_item_likes_in_crate<V>(self, visitor: &mut V) -> V::Result
V: Visitor<'hir>,
{
let krate = self.tcx.hir_crate_items(());
walk_list!(visitor, visit_item, krate.items().map(|id| self.item(id)));
walk_list!(visitor, visit_item, krate.free_items().map(|id| self.item(id)));
walk_list!(visitor, visit_trait_item, krate.trait_items().map(|id| self.trait_item(id)));
walk_list!(visitor, visit_impl_item, krate.impl_items().map(|id| self.impl_item(id)));
walk_list!(
@ -436,7 +436,7 @@ pub fn visit_item_likes_in_module<V>(self, module: LocalModDefId, visitor: &mut
V: Visitor<'hir>,
{
let module = self.tcx.hir_module_items(module);
walk_list!(visitor, visit_item, module.items().map(|id| self.item(id)));
walk_list!(visitor, visit_item, module.free_items().map(|id| self.item(id)));
walk_list!(visitor, visit_trait_item, module.trait_items().map(|id| self.trait_item(id)));
walk_list!(visitor, visit_impl_item, module.impl_items().map(|id| self.impl_item(id)));
walk_list!(
@ -1197,7 +1197,7 @@ pub(super) fn hir_module_items(tcx: TyCtxt<'_>, module_id: LocalModDefId) -> Mod
} = collector;
return ModuleItems {
submodules: submodules.into_boxed_slice(),
items: items.into_boxed_slice(),
free_items: items.into_boxed_slice(),
trait_items: trait_items.into_boxed_slice(),
impl_items: impl_items.into_boxed_slice(),
foreign_items: foreign_items.into_boxed_slice(),
@ -1226,7 +1226,7 @@ pub(crate) fn hir_crate_items(tcx: TyCtxt<'_>, _: ()) -> ModuleItems {
return ModuleItems {
submodules: submodules.into_boxed_slice(),
items: items.into_boxed_slice(),
free_items: items.into_boxed_slice(),
trait_items: trait_items.into_boxed_slice(),
impl_items: impl_items.into_boxed_slice(),
foreign_items: foreign_items.into_boxed_slice(),

View file

@ -22,7 +22,7 @@
#[derive(Debug, HashStable, Encodable, Decodable)]
pub struct ModuleItems {
submodules: Box<[OwnerId]>,
items: Box<[ItemId]>,
free_items: Box<[ItemId]>,
trait_items: Box<[TraitItemId]>,
impl_items: Box<[ImplItemId]>,
foreign_items: Box<[ForeignItemId]>,
@ -30,14 +30,22 @@ pub struct ModuleItems {
}
impl ModuleItems {
pub fn items(&self) -> impl Iterator<Item = ItemId> + '_ {
self.items.iter().copied()
/// Returns all non-associated locally defined items in all modules.
///
/// Note that this does *not* include associated items of `impl` blocks! It also does not
/// include foreign items. If you want to e.g. get all functions, use `definitions()` below.
///
/// However, this does include the `impl` blocks themselves.
pub fn free_items(&self) -> impl Iterator<Item = ItemId> + '_ {
self.free_items.iter().copied()
}
pub fn trait_items(&self) -> impl Iterator<Item = TraitItemId> + '_ {
self.trait_items.iter().copied()
}
/// Returns all items that are associated with some `impl` block (both inherent and trait impl
/// blocks).
pub fn impl_items(&self) -> impl Iterator<Item = ImplItemId> + '_ {
self.impl_items.iter().copied()
}
@ -47,7 +55,7 @@ pub fn foreign_items(&self) -> impl Iterator<Item = ForeignItemId> + '_ {
}
pub fn owners(&self) -> impl Iterator<Item = OwnerId> + '_ {
self.items
self.free_items
.iter()
.map(|id| id.owner_id)
.chain(self.trait_items.iter().map(|id| id.owner_id))
@ -63,7 +71,7 @@ pub fn par_items(
&self,
f: impl Fn(ItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync,
) -> Result<(), ErrorGuaranteed> {
try_par_for_each_in(&self.items[..], |&id| f(id))
try_par_for_each_in(&self.free_items[..], |&id| f(id))
}
pub fn par_trait_items(
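
For context (illustrative only, not part of this diff), the distinction the new `free_items()` doc comment draws:

```rust
struct S;               // a free item
impl S {                // the `impl` block itself is also a free item...
    fn method(&self) {} // ...but `method` is an impl item, not a free item
}
extern "C" {
    fn c_fn();          // a foreign item, not a free item
}
fn main() {}
```

Here `free_items()` would yield `S`, the `impl S` block, the `extern` block, and `main`, while `method` comes from `impl_items()` and `c_fn` from `foreign_items()`.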

View file

@ -11,12 +11,18 @@
/// [`span_bug`]: crate::span_bug
#[macro_export]
macro_rules! bug {
() => ( $crate::bug!("impossible case reached") );
($msg:expr) => ({ $crate::util::bug::bug_fmt(::std::format_args!($msg)) });
($msg:expr,) => ({ $crate::bug!($msg) });
($fmt:expr, $($arg:tt)+) => ({
() => (
$crate::bug!("impossible case reached")
);
($msg:expr) => (
$crate::util::bug::bug_fmt(::std::format_args!($msg))
);
($msg:expr,) => (
$crate::bug!($msg)
);
($fmt:expr, $($arg:tt)+) => (
$crate::util::bug::bug_fmt(::std::format_args!($fmt, $($arg)+))
});
);
}
/// A macro for triggering an ICE with a span.
@ -30,11 +36,15 @@ macro_rules! bug {
/// [`DiagCtxt::span_delayed_bug`]: rustc_errors::DiagCtxt::span_delayed_bug
#[macro_export]
macro_rules! span_bug {
($span:expr, $msg:expr) => ({ $crate::util::bug::span_bug_fmt($span, ::std::format_args!($msg)) });
($span:expr, $msg:expr,) => ({ $crate::span_bug!($span, $msg) });
($span:expr, $fmt:expr, $($arg:tt)+) => ({
($span:expr, $msg:expr) => (
$crate::util::bug::span_bug_fmt($span, ::std::format_args!($msg))
);
($span:expr, $msg:expr,) => (
$crate::span_bug!($span, $msg)
);
($span:expr, $fmt:expr, $($arg:tt)+) => (
$crate::util::bug::span_bug_fmt($span, ::std::format_args!($fmt, $($arg)+))
});
);
}
///////////////////////////////////////////////////////////////////////////

View file

@ -1179,7 +1179,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{subpattern}")
}
PatKind::DerefPattern { ref subpattern } => {
write!(f, "k#deref {subpattern}")
write!(f, "deref!({subpattern})")
}
PatKind::Constant { value } => write!(f, "{value}"),
PatKind::InlineConstant { def: _, ref subpattern } => {

View file

@ -257,7 +257,7 @@ fn lower_pattern_unadjusted(&mut self, pat: &'tcx hir::Pat<'tcx>) -> Box<Pat<'tc
return self.lower_path(qpath, pat.hir_id, pat.span);
}
hir::PatKind::Box(subpattern) if self.tcx.features().deref_patterns => {
hir::PatKind::Deref(subpattern) => {
PatKind::DerefPattern { subpattern: self.lower_pattern(subpattern) }
}
hir::PatKind::Ref(subpattern, _) | hir::PatKind::Box(subpattern) => {

View file

@ -1526,7 +1526,7 @@ fn collect_roots(tcx: TyCtxt<'_>, mode: MonoItemCollectionStrategy) -> Vec<MonoI
let crate_items = tcx.hir_crate_items(());
for id in crate_items.items() {
for id in crate_items.free_items() {
collector.process_item(id);
}

View file

@ -390,8 +390,6 @@ parse_invalid_dyn_keyword = invalid `dyn` keyword
parse_invalid_expression_in_let_else = a `{$operator}` expression cannot be directly assigned in `let...else`
parse_invalid_identifier_with_leading_number = identifiers cannot start with a number
parse_invalid_interpolated_expression = invalid interpolated expression
parse_invalid_literal_suffix_on_tuple_index = suffixes on a tuple index are invalid
.label = invalid suffix `{$suffix}`
.tuple_exception_line_1 = `{$suffix}` is *temporarily* accepted on tuple index fields as it was incorrectly accepted on stable for a few releases

View file

@ -850,13 +850,6 @@ pub(crate) struct StructLiteralNotAllowedHereSugg {
pub right: Span,
}
#[derive(Diagnostic)]
#[diag(parse_invalid_interpolated_expression)]
pub(crate) struct InvalidInterpolatedExpression {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(parse_invalid_literal_suffix_on_tuple_index)]
pub(crate) struct InvalidLiteralSuffixOnTupleIndex {

View file

@ -3,11 +3,12 @@
SuffixedLiteralInAttribute,
};
use crate::fluent_generated as fluent;
use crate::maybe_whole;
use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle};
use rustc_ast as ast;
use rustc_ast::attr;
use rustc_ast::token::{self, Delimiter, Nonterminal};
use rustc_ast::token::{self, Delimiter};
use rustc_errors::{codes::*, Diag, PResult};
use rustc_span::{sym, BytePos, Span};
use thin_vec::ThinVec;
@ -251,25 +252,15 @@ pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerA
/// PATH `=` UNSUFFIXED_LIT
/// The delimiters or `=` are still put into the resulting token stream.
pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
let item = match &self.token.kind {
token::Interpolated(nt) => match &nt.0 {
Nonterminal::NtMeta(item) => Some(item.clone().into_inner()),
_ => None,
},
_ => None,
maybe_whole!(self, NtMeta, |attr| attr.into_inner());
let do_parse = |this: &mut Self| {
let path = this.parse_path(PathStyle::Mod)?;
let args = this.parse_attr_args()?;
Ok(ast::AttrItem { path, args, tokens: None })
};
Ok(if let Some(item) = item {
self.bump();
item
} else {
let do_parse = |this: &mut Self| {
let path = this.parse_path(PathStyle::Mod)?;
let args = this.parse_attr_args()?;
Ok(ast::AttrItem { path, args, tokens: None })
};
// Attr items don't have attributes
if capture_tokens { self.collect_tokens_no_attrs(do_parse) } else { do_parse(self) }?
})
// Attr items don't have attributes
if capture_tokens { self.collect_tokens_no_attrs(do_parse) } else { do_parse(self) }
}
/// Parses attributes that appear after the opening of an item. These should
@ -371,22 +362,18 @@ pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::NestedMe
/// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
/// ```
pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
let nt_meta = match &self.token.kind {
token::Interpolated(nt) => match &nt.0 {
token::NtMeta(e) => Some(e.clone()),
_ => None,
},
_ => None,
};
if let Some(item) = nt_meta {
match item.meta(item.path.span) {
// We can't use `maybe_whole` here because it would bump in the `None`
// case, which we don't want.
if let token::Interpolated(nt) = &self.token.kind
&& let token::NtMeta(attr_item) = &nt.0
{
match attr_item.meta(attr_item.path.span) {
Some(meta) => {
self.bump();
return Ok(meta);
}
None => self.unexpected()?,
};
}
}
let lo = self.token.span;

View file

@ -2053,16 +2053,6 @@ fn handle_missing_lit<L>(
&mut self,
mk_lit_char: impl FnOnce(Symbol, Span) -> L,
) -> PResult<'a, L> {
if let token::Interpolated(nt) = &self.token.kind
&& let token::NtExpr(e) | token::NtLiteral(e) = &nt.0
&& matches!(e.kind, ExprKind::Err(_))
{
let mut err = self
.dcx()
.create_err(errors::InvalidInterpolatedExpression { span: self.token.span });
err.downgrade_to_delayed_bug();
return Err(err);
}
let token = self.token.clone();
let err = |self_: &Self| {
let msg = format!("unexpected token: {}", super::token_descr(&token));

View file

@ -6,6 +6,7 @@
};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::fluent_generated as fluent;
use crate::maybe_whole;
use ast::token::IdentIsRaw;
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
@ -115,17 +116,10 @@ pub(super) fn parse_item_common(
fn_parse_mode: FnParseMode,
force_collect: ForceCollect,
) -> PResult<'a, Option<Item>> {
// Don't use `maybe_whole` so that we have precise control
// over when we bump the parser
if let token::Interpolated(nt) = &self.token.kind
&& let token::NtItem(item) = &nt.0
{
let mut item = item.clone();
self.bump();
maybe_whole!(self, NtItem, |item| {
attrs.prepend_to_nt_inner(&mut item.attrs);
return Ok(Some(item.into_inner()));
};
Some(item.into_inner())
});
let item =
self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {

View file

@ -20,7 +20,7 @@
pub use path::PathStyle;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
use rustc_ast::util::case::Case;
@ -93,12 +93,13 @@ pub enum TrailingToken {
#[macro_export]
macro_rules! maybe_whole {
($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
if let token::Interpolated(nt) = &$p.token.kind {
if let token::$constructor(x) = &nt.0 {
let $x = x.clone();
$p.bump();
return Ok($e);
}
if let token::Interpolated(nt) = &$p.token.kind
&& let token::$constructor(x) = &nt.0
{
#[allow(unused_mut)]
let mut $x = x.clone();
$p.bump();
return Ok($e);
}
};
}
@ -1407,7 +1408,7 @@ fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) ->
/// so emit a proper diagnostic.
// Public for rustfmt usage.
pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
maybe_whole!(self, NtVis, |x| x.into_inner());
maybe_whole!(self, NtVis, |vis| vis.into_inner());
if !self.eat_keyword(kw::Pub) {
// We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
@ -1584,8 +1585,21 @@ pub enum FlatToken {
Empty,
}
#[derive(Debug)]
pub enum ParseNtResult {
Nt(Nonterminal),
// Metavar captures of various kinds.
#[derive(Clone, Debug)]
pub enum ParseNtResult<NtType> {
Tt(TokenTree),
Nt(NtType),
}
impl<T> ParseNtResult<T> {
pub fn map_nt<F, U>(self, mut f: F) -> ParseNtResult<U>
where
F: FnMut(T) -> U,
{
match self {
ParseNtResult::Tt(tt) => ParseNtResult::Tt(tt),
ParseNtResult::Nt(nt) => ParseNtResult::Nt(f(nt)),
}
}
}

View file

@ -1,5 +1,5 @@
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Nonterminal::*, NonterminalKind, Token};
use rustc_ast::token::{self, Delimiter, Nonterminal, Nonterminal::*, NonterminalKind, Token};
use rustc_ast::HasTokens;
use rustc_ast_pretty::pprust;
use rustc_errors::PResult;
@ -66,15 +66,14 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool {
token::Interpolated(nt) => may_be_ident(&nt.0),
_ => false,
},
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
match &token.kind {
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => match &token.kind {
token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
token::OpenDelim(Delimiter::Parenthesis) | // tuple pattern
token::OpenDelim(Delimiter::Bracket) | // slice pattern
token::BinOp(token::And) | // reference
token::BinOp(token::Minus) | // negative literal
token::AndAnd | // double reference
token::Literal(_) | // literal
token::Literal(_) | // literal
token::DotDot | // range pattern (future compat)
token::DotDotDot | // range pattern (future compat)
token::ModSep | // path
@ -84,8 +83,7 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool {
token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr),
token::Interpolated(nt) => may_be_ident(&nt.0),
_ => false,
}
}
},
NonterminalKind::Lifetime => match &token.kind {
token::Lifetime(_) => true,
token::Interpolated(nt) => {
@ -102,7 +100,10 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool {
/// Parse a non-terminal (e.g. MBE `:pat` or `:ident`). Inlined because there is only one call
/// site.
#[inline]
pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, ParseNtResult> {
pub fn parse_nonterminal(
&mut self,
kind: NonterminalKind,
) -> PResult<'a, ParseNtResult<Nonterminal>> {
// A `macro_rules!` invocation may pass a captured item/expr to a proc-macro,
// which requires having captured tokens available. Since we cannot determine
// in advance whether or not a proc-macro will be (transitively) invoked,

View file

@ -435,7 +435,7 @@ fn parse_pat_with_range_pat(
syntax_loc: Option<PatternLocation>,
) -> PResult<'a, P<Pat>> {
maybe_recover_from_interpolated_ty_qpath!(self, true);
maybe_whole!(self, NtPat, |x| x);
maybe_whole!(self, NtPat, |pat| pat);
let mut lo = self.token.span;
@ -498,11 +498,14 @@ fn parse_pat_with_range_pat(
} else {
PatKind::Lit(const_expr)
}
} else if self.is_builtin() {
self.parse_pat_builtin()?
}
// Don't eagerly error on semantically invalid tokens when matching
// declarative macros, as the input to those doesn't have to be
// semantically valid. For attribute/derive proc macros this is not the
// case, so doing the recovery for them is fine.
} else if self.can_be_ident_pat()
else if self.can_be_ident_pat()
|| (self.is_lit_bad_ident().is_some() && self.may_recover())
{
// Parse `ident @ pat`
@ -1119,6 +1122,21 @@ fn isnt_pattern_start(&self) -> bool {
.contains(&self.token.kind)
}
fn parse_pat_builtin(&mut self) -> PResult<'a, PatKind> {
self.parse_builtin(|self_, _lo, ident| {
Ok(match ident.name {
// builtin#deref(PAT)
sym::deref => Some(ast::PatKind::Deref(self_.parse_pat_allow_top_alt(
None,
RecoverComma::Yes,
RecoverColon::Yes,
CommaRecoveryMode::LikelyTuple,
)?)),
_ => None,
})
})
}
/// Parses `box pat`
fn parse_pat_box(&mut self) -> PResult<'a, PatKind> {
let box_span = self.prev_token.span;

View file

@ -40,8 +40,8 @@ pub fn parse_stmt(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<
}))
}
/// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of whether
/// or not we have attributes
/// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of
/// whether or not we have attributes.
// Public for `cfg_eval` macro expansion.
pub fn parse_stmt_without_recovery(
&mut self,
@ -51,18 +51,12 @@ pub fn parse_stmt_without_recovery(
let attrs = self.parse_outer_attributes()?;
let lo = self.token.span;
// Don't use `maybe_whole` so that we have precise control
// over when we bump the parser
if let token::Interpolated(nt) = &self.token.kind
&& let token::NtStmt(stmt) = &nt.0
{
let mut stmt = stmt.clone();
self.bump();
maybe_whole!(self, NtStmt, |stmt| {
stmt.visit_attrs(|stmt_attrs| {
attrs.prepend_to_nt_inner(stmt_attrs);
});
return Ok(Some(stmt.into_inner()));
}
Some(stmt.into_inner())
});
if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) {
self.bump();
@ -539,7 +533,7 @@ pub(super) fn parse_block_common(
blk_mode: BlockCheckMode,
can_be_struct_literal: bool,
) -> PResult<'a, (AttrVec, P<Block>)> {
maybe_whole!(self, NtBlock, |x| (AttrVec::new(), x));
maybe_whole!(self, NtBlock, |block| (AttrVec::new(), block));
let maybe_ident = self.prev_token.clone();
self.maybe_recover_unexpected_block_label();
@ -643,7 +637,7 @@ pub fn parse_full_stmt(
recover: AttemptLocalParseRecovery,
) -> PResult<'a, Option<Stmt>> {
// Skip looking for a trailing semicolon when we have an interpolated statement.
maybe_whole!(self, NtStmt, |x| Some(x.into_inner()));
maybe_whole!(self, NtStmt, |stmt| Some(stmt.into_inner()));
let Some(mut stmt) = self.parse_stmt_without_recovery(true, ForceCollect::No)? else {
return Ok(None);

View file

@ -250,7 +250,7 @@ fn parse_ty_common(
) -> PResult<'a, P<Ty>> {
let allow_qpath_recovery = recover_qpath == RecoverQPath::Yes;
maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
maybe_whole!(self, NtTy, |x| x);
maybe_whole!(self, NtTy, |ty| ty);
let lo = self.token.span;
let mut impl_dyn_multi = false;

View file

@ -831,7 +831,7 @@ fn create_and_seed_worklist(
.collect::<Vec<_>>();
let crate_items = tcx.hir_crate_items(());
for id in crate_items.items() {
for id in crate_items.free_items() {
check_item(tcx, &mut worklist, &mut struct_constructors, &mut unsolved_impl_item, id);
}
@ -1084,7 +1084,7 @@ fn check_mod_deathness(tcx: TyCtxt<'_>, module: LocalModDefId) {
let module_items = tcx.hir_module_items(module);
for item in module_items.items() {
for item in module_items.free_items() {
let def_kind = tcx.def_kind(item.owner_id);
let mut dead_codes = Vec::new();

View file

@ -300,6 +300,7 @@ fn visit_pat(&mut self, p: &'v hir::Pat<'v>) {
Path,
Tuple,
Box,
Deref,
Ref,
Lit,
Range,
@ -566,6 +567,7 @@ fn visit_pat(&mut self, p: &'v ast::Pat) {
Path,
Tuple,
Box,
Deref,
Ref,
Lit,
Range,

View file

@ -437,7 +437,7 @@ fn reachable_set(tcx: TyCtxt<'_>, (): ()) -> LocalDefIdSet {
// trait is a lang item.
let crate_items = tcx.hir_crate_items(());
for id in crate_items.items() {
for id in crate_items.free_items() {
check_item(tcx, id, &mut reachable_context.worklist, effective_visibilities);
}

View file

@ -22,6 +22,10 @@ smallvec = { version = "1.8.1", features = ["union"] }
tracing = "0.1"
# tidy-alphabetical-end
[dev-dependencies]
tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "ansi"] }
tracing-tree = "0.2.0"
[features]
default = ["rustc"]
rustc = [

View file

@ -819,6 +819,81 @@ pub(crate) fn is_covered_by(&self, cx: &Cx, other: &Self) -> Result<bool, Cx::Er
}
})
}
pub(crate) fn fmt_fields(
&self,
f: &mut fmt::Formatter<'_>,
ty: &Cx::Ty,
mut fields: impl Iterator<Item = impl fmt::Debug>,
) -> fmt::Result {
let mut first = true;
let mut start_or_continue = |s| {
if first {
first = false;
""
} else {
s
}
};
let mut start_or_comma = || start_or_continue(", ");
match self {
Struct | Variant(_) | UnionField => {
Cx::write_variant_name(f, self, ty)?;
// Without `cx`, we can't know which field corresponds to which, so we can't
// get the names of the fields. Instead we just display everything as a tuple
// struct, which should be good enough.
write!(f, "(")?;
for p in fields {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
write!(f, ")")?;
}
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
// be careful to detect strings here. However a string literal pattern will never
// be reported as a non-exhaustiveness witness, so we can ignore this issue.
Ref => {
write!(f, "&{:?}", &fields.next().unwrap())?;
}
Slice(slice) => {
write!(f, "[")?;
match slice.kind {
SliceKind::FixedLen(_) => {
for p in fields {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
}
SliceKind::VarLen(prefix_len, _) => {
for p in fields.by_ref().take(prefix_len) {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
write!(f, "{}..", start_or_comma())?;
for p in fields {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
}
}
write!(f, "]")?;
}
Bool(b) => write!(f, "{b}")?,
// Best-effort, will render signed ranges incorrectly
IntRange(range) => write!(f, "{range:?}")?,
F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}")?,
F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}")?,
Str(value) => write!(f, "{value:?}")?,
Opaque(..) => write!(f, "<constant pattern>")?,
Or => {
for pat in fields {
write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
}
}
Never => write!(f, "!")?,
Wildcard | Missing | NonExhaustive | Hidden | PrivateUninhabited => {
write!(f, "_ : {:?}", ty)?
}
}
Ok(())
}
}
#[derive(Debug, Clone, Copy)]

View file

@ -49,6 +49,12 @@ pub fn iter_enumerated(&self) -> impl Iterator<Item = (K, &V)> {
}
}
impl<V> FromIterator<V> for IdxContainer<usize, V> {
fn from_iter<T: IntoIterator<Item = V>>(iter: T) -> Self {
Self(iter.into_iter().enumerate().collect())
}
}
#[derive(Debug)]
pub struct IdxSet<T>(pub rustc_hash::FxHashSet<T>);
impl<T: Idx> IdxSet<T> {
@ -120,7 +126,8 @@ fn ctor_sub_tys<'a>(
/// `DeconstructedPat`. Only invoked when `pat.ctor()` is `Struct | Variant(_) | UnionField`.
fn write_variant_name(
f: &mut fmt::Formatter<'_>,
pat: &crate::pat::DeconstructedPat<Self>,
ctor: &crate::constructor::Constructor<Self>,
ty: &Self::Ty,
) -> fmt::Result;
/// Raise a bug.

View file

@ -138,81 +138,11 @@ pub fn walk<'a>(&'a self, it: &mut impl FnMut(&'a Self) -> bool) {
/// This is best effort and not good enough for a `Display` impl.
impl<Cx: PatCx> fmt::Debug for DeconstructedPat<Cx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let pat = self;
let mut first = true;
let mut start_or_continue = |s| {
if first {
first = false;
""
} else {
s
}
};
let mut start_or_comma = || start_or_continue(", ");
let mut fields: Vec<_> = (0..self.arity).map(|_| PatOrWild::Wild).collect();
for ipat in self.iter_fields() {
fields[ipat.idx] = PatOrWild::Pat(&ipat.pat);
}
match pat.ctor() {
Struct | Variant(_) | UnionField => {
Cx::write_variant_name(f, pat)?;
// Without `cx`, we can't know which field corresponds to which, so we can't
// get the names of the fields. Instead we just display everything as a tuple
// struct, which should be good enough.
write!(f, "(")?;
for p in fields {
write!(f, "{}", start_or_comma())?;
write!(f, "{p:?}")?;
}
write!(f, ")")
}
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
// be careful to detect strings here. However a string literal pattern will never
// be reported as a non-exhaustiveness witness, so we can ignore this issue.
Ref => {
write!(f, "&{:?}", &fields[0])
}
Slice(slice) => {
write!(f, "[")?;
match slice.kind {
SliceKind::FixedLen(_) => {
for p in fields {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
}
SliceKind::VarLen(prefix_len, _) => {
for p in &fields[..prefix_len] {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
write!(f, "{}", start_or_comma())?;
write!(f, "..")?;
for p in &fields[prefix_len..] {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
}
}
write!(f, "]")
}
Bool(b) => write!(f, "{b}"),
// Best-effort, will render signed ranges incorrectly
IntRange(range) => write!(f, "{range:?}"),
F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
Str(value) => write!(f, "{value:?}"),
Opaque(..) => write!(f, "<constant pattern>"),
Or => {
for pat in fields {
write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
}
Ok(())
}
Never => write!(f, "!"),
Wildcard | Missing | NonExhaustive | Hidden | PrivateUninhabited => {
write!(f, "_ : {:?}", pat.ty())
}
}
self.ctor().fmt_fields(f, self.ty(), fields.into_iter())
}
}
@ -295,7 +225,6 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostics
/// purposes. As such they don't use interning and can be cloned.
#[derive(Debug)]
pub struct WitnessPat<Cx: PatCx> {
ctor: Constructor<Cx>,
pub(crate) fields: Vec<WitnessPat<Cx>>,
@ -353,3 +282,10 @@ pub fn iter_fields(&self) -> impl Iterator<Item = &WitnessPat<Cx>> {
self.fields.iter()
}
}
/// This is best effort and not good enough for a `Display` impl.
impl<Cx: PatCx> fmt::Debug for WitnessPat<Cx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.ctor().fmt_fields(f, self.ty(), self.fields.iter())
}
}

View file

@ -880,13 +880,14 @@ fn ctors_for_ty(
fn write_variant_name(
f: &mut fmt::Formatter<'_>,
pat: &crate::pat::DeconstructedPat<Self>,
ctor: &crate::constructor::Constructor<Self>,
ty: &Self::Ty,
) -> fmt::Result {
if let ty::Adt(adt, _) = pat.ty().kind() {
if let ty::Adt(adt, _) = ty.kind() {
if adt.is_box() {
write!(f, "Box")?
} else {
let variant = adt.variant(Self::variant_index_for_adt(pat.ctor(), *adt));
let variant = adt.variant(Self::variant_index_for_adt(ctor, *adt));
write!(f, "{}", variant.name)?;
}
}

View file

@ -1042,7 +1042,7 @@ struct MatrixRow<'p, Cx: PatCx> {
is_under_guard: bool,
/// When we specialize, we remember which row of the original matrix produced a given row of the
/// specialized matrix. When we unspecialize, we use this to propagate usefulness back up the
/// callstack.
/// callstack. On creation, this stores the index of the original match arm.
parent_row: usize,
/// False when the matrix is just built. This is set to `true` by
/// [`compute_exhaustiveness_and_usefulness`] if the arm is found to be useful.
@ -1163,10 +1163,10 @@ fn new(arms: &[MatchArm<'p, Cx>], scrut_ty: Cx::Ty, scrut_validity: PlaceValidit
place_info: smallvec![place_info],
wildcard_row_is_relevant: true,
};
for (row_id, arm) in arms.iter().enumerate() {
for (arm_id, arm) in arms.iter().enumerate() {
let v = MatrixRow {
pats: PatStack::from_pattern(arm.pat),
parent_row: row_id, // dummy, we don't read it
parent_row: arm_id,
is_under_guard: arm.has_guard,
useful: false,
intersects: BitSet::new_empty(0), // Initialized in `Matrix::expand_and_push`.
@ -1738,6 +1738,9 @@ pub struct UsefulnessReport<'p, Cx: PatCx> {
/// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
/// exhaustiveness.
pub non_exhaustiveness_witnesses: Vec<WitnessPat<Cx>>,
/// For each arm, a set of indices of arms above it that have non-empty intersection, i.e. there
/// is a value matched by both arms. This may miss real intersections.
pub arm_intersections: Vec<BitSet<usize>>,
}
/// Computes whether a match is exhaustive and which of its arms are useful.
@ -1769,5 +1772,19 @@ pub fn compute_match_usefulness<'p, Cx: PatCx>(
})
.collect();
Ok(UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses })
let mut arm_intersections: Vec<_> =
arms.iter().enumerate().map(|(i, _)| BitSet::new_empty(i)).collect();
for row in matrix.rows() {
let arm_id = row.parent_row;
for intersection in row.intersects.iter() {
// Convert the matrix row ids into arm ids (they can differ because we expand or-patterns).
let arm_intersection = matrix.rows[intersection].parent_row;
// Note: self-intersection can happen with or-patterns.
if arm_intersection != arm_id {
arm_intersections[arm_id].insert(arm_intersection);
}
}
}
Ok(UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses, arm_intersections })
}
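
For context (illustrative only, not part of this diff), what an "arm intersection" means at the surface level:

```rust
// Arm 1 overlaps arm 0 on 5..=10, so the report's `arm_intersections[1]`
// would contain index 0; the wildcard arm 2 overlaps both arms above it.
fn demo(x: u32) -> &'static str {
    match x {
        0..=10 => "low",  // arm 0
        5..=20 => "mid",  // arm 1
        _ => "high",      // arm 2
    }
}

fn main() {
    assert_eq!(demo(7), "low");
}
```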

View file

@ -0,0 +1,315 @@
use rustc_pattern_analysis::{
constructor::{
Constructor, ConstructorSet, IntRange, MaybeInfiniteInt, RangeEnd, VariantVisibility,
},
usefulness::{PlaceValidity, UsefulnessReport},
Captures, MatchArm, PatCx, PrivateUninhabitedField,
};
/// Sets up `tracing` for easier debugging. Tries to look like the `rustc` setup.
pub fn init_tracing() {
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::Layer;
let _ = tracing_tree::HierarchicalLayer::default()
.with_writer(std::io::stderr)
.with_indent_lines(true)
.with_ansi(true)
.with_targets(true)
.with_indent_amount(2)
.with_subscriber(
tracing_subscriber::Registry::default()
.with(tracing_subscriber::EnvFilter::from_default_env()),
)
.try_init();
}
/// A simple set of types.
#[allow(dead_code)]
#[derive(Debug, Copy, Clone)]
pub enum Ty {
/// Booleans
Bool,
/// 8-bit unsigned integers
U8,
/// Tuples.
Tuple(&'static [Ty]),
/// A struct with `arity` fields of type `ty`.
BigStruct { arity: usize, ty: &'static Ty },
/// An enum with `arity` variants of type `ty`.
BigEnum { arity: usize, ty: &'static Ty },
}
/// The important logic.
impl Ty {
pub fn sub_tys(&self, ctor: &Constructor<Cx>) -> Vec<Self> {
use Constructor::*;
match (ctor, *self) {
(Struct, Ty::Tuple(tys)) => tys.iter().copied().collect(),
(Struct, Ty::BigStruct { arity, ty }) => (0..arity).map(|_| *ty).collect(),
(Variant(_), Ty::BigEnum { ty, .. }) => vec![*ty],
(Bool(..) | IntRange(..) | NonExhaustive | Missing | Wildcard, _) => vec![],
_ => panic!("Unexpected ctor {ctor:?} for type {self:?}"),
}
}
pub fn ctor_set(&self) -> ConstructorSet<Cx> {
match *self {
Ty::Bool => ConstructorSet::Bool,
Ty::U8 => ConstructorSet::Integers {
range_1: IntRange::from_range(
MaybeInfiniteInt::new_finite_uint(0),
MaybeInfiniteInt::new_finite_uint(255),
RangeEnd::Included,
),
range_2: None,
},
Ty::Tuple(..) | Ty::BigStruct { .. } => ConstructorSet::Struct { empty: false },
Ty::BigEnum { arity, .. } => ConstructorSet::Variants {
variants: (0..arity).map(|_| VariantVisibility::Visible).collect(),
non_exhaustive: false,
},
}
}
pub fn write_variant_name(
&self,
f: &mut std::fmt::Formatter<'_>,
ctor: &Constructor<Cx>,
) -> std::fmt::Result {
match (*self, ctor) {
(Ty::Tuple(..), _) => Ok(()),
(Ty::BigStruct { .. }, _) => write!(f, "BigStruct"),
(Ty::BigEnum { .. }, Constructor::Variant(i)) => write!(f, "BigEnum::Variant{i}"),
_ => write!(f, "{:?}::{:?}", self, ctor),
}
}
}
/// Compute usefulness in our simple context (and set up tracing for easier debugging).
pub fn compute_match_usefulness<'p>(
arms: &[MatchArm<'p, Cx>],
ty: Ty,
scrut_validity: PlaceValidity,
complexity_limit: Option<usize>,
) -> Result<UsefulnessReport<'p, Cx>, ()> {
init_tracing();
rustc_pattern_analysis::usefulness::compute_match_usefulness(
&Cx,
arms,
ty,
scrut_validity,
complexity_limit,
)
}
#[derive(Debug)]
pub struct Cx;
/// The context for pattern analysis. Forwards anything interesting to `Ty` methods.
impl PatCx for Cx {
type Ty = Ty;
type Error = ();
type VariantIdx = usize;
type StrLit = ();
type ArmData = ();
type PatData = ();
fn is_exhaustive_patterns_feature_on(&self) -> bool {
false
}
fn is_min_exhaustive_patterns_feature_on(&self) -> bool {
false
}
fn ctor_arity(&self, ctor: &Constructor<Self>, ty: &Self::Ty) -> usize {
ty.sub_tys(ctor).len()
}
fn ctor_sub_tys<'a>(
&'a self,
ctor: &'a Constructor<Self>,
ty: &'a Self::Ty,
) -> impl Iterator<Item = (Self::Ty, PrivateUninhabitedField)> + ExactSizeIterator + Captures<'a>
{
ty.sub_tys(ctor).into_iter().map(|ty| (ty, PrivateUninhabitedField(false)))
}
fn ctors_for_ty(&self, ty: &Self::Ty) -> Result<ConstructorSet<Self>, Self::Error> {
Ok(ty.ctor_set())
}
fn write_variant_name(
f: &mut std::fmt::Formatter<'_>,
ctor: &Constructor<Self>,
ty: &Self::Ty,
) -> std::fmt::Result {
ty.write_variant_name(f, ctor)
}
fn bug(&self, fmt: std::fmt::Arguments<'_>) -> Self::Error {
panic!("{}", fmt)
}
/// Abort when reaching the complexity limit. This is what we'll check in tests.
fn complexity_exceeded(&self) -> Result<(), Self::Error> {
Err(())
}
}
/// Construct a single pattern; see `pats!()`.
#[allow(unused_macros)]
macro_rules! pat {
($($rest:tt)*) => {{
let mut vec = pats!($($rest)*);
vec.pop().unwrap()
}};
}
/// A macro to construct patterns. Called like `pats!(type_expr; pattern, pattern, ..)` and returns
/// a `Vec<DeconstructedPat>`. A pattern can be nested and looks like `Constructor(pat, pat)` or
/// `Constructor { .i: pat, .j: pat }`, where `Constructor` is `Struct`, `Variant.i` (with index
/// `i`), as well as booleans and integer ranges.
///
/// The general structure of the macro is a tt-muncher with several stages identified with
/// `@something(args)`. The args are a key-value list (the keys ensure we don't mix the arguments
/// around) which is passed down and modified as needed. We then parse token-trees from
/// left-to-right. Non-trivial recursion happens when we parse the arguments to a pattern: we
/// recurse to parse the tokens inside `{..}`/`(..)`, and then we continue parsing anything that
/// follows.
macro_rules! pats {
// Entrypoint
// Parse `type; ..`
($ty:expr; $($rest:tt)*) => {{
#[allow(unused_imports)]
use rustc_pattern_analysis::{
constructor::{Constructor, IntRange, MaybeInfiniteInt, RangeEnd},
pat::DeconstructedPat,
};
let ty = $ty;
// The heart of the macro is designed to push `IndexedPat`s into a `Vec`, so we work around
// that.
let sub_tys = ::std::iter::repeat(&ty);
let mut vec = Vec::new();
pats!(@ctor(vec:vec, sub_tys:sub_tys, idx:0) $($rest)*);
vec.into_iter().map(|ipat| ipat.pat).collect::<Vec<_>>()
}};
// Parse `constructor ..`
(@ctor($($args:tt)*) true $($rest:tt)*) => {{
let ctor = Constructor::Bool(true);
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) false $($rest:tt)*) => {{
let ctor = Constructor::Bool(false);
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) Struct $($rest:tt)*) => {{
let ctor = Constructor::Struct;
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) ( $($fields:tt)* ) $($rest:tt)*) => {{
let ctor = Constructor::Struct; // tuples
pats!(@pat($($args)*, ctor:ctor) ( $($fields)* ) $($rest)*)
}};
(@ctor($($args:tt)*) Variant.$variant:ident $($rest:tt)*) => {{
let ctor = Constructor::Variant($variant);
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) Variant.$variant:literal $($rest:tt)*) => {{
let ctor = Constructor::Variant($variant);
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) _ $($rest:tt)*) => {{
let ctor = Constructor::Wildcard;
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
// Integers and int ranges
(@ctor($($args:tt)*) $($start:literal)?..$end:literal $($rest:tt)*) => {{
let ctor = Constructor::IntRange(IntRange::from_range(
pats!(@rangeboundary- $($start)?),
pats!(@rangeboundary+ $end),
RangeEnd::Excluded,
));
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) $($start:literal)?.. $($rest:tt)*) => {{
let ctor = Constructor::IntRange(IntRange::from_range(
pats!(@rangeboundary- $($start)?),
pats!(@rangeboundary+),
RangeEnd::Excluded,
));
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) $($start:literal)?..=$end:literal $($rest:tt)*) => {{
let ctor = Constructor::IntRange(IntRange::from_range(
pats!(@rangeboundary- $($start)?),
pats!(@rangeboundary+ $end),
RangeEnd::Included,
));
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) $int:literal $($rest:tt)*) => {{
let ctor = Constructor::IntRange(IntRange::from_range(
pats!(@rangeboundary- $int),
pats!(@rangeboundary+ $int),
RangeEnd::Included,
));
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
// Utility to manage range boundaries.
(@rangeboundary $sign:tt $int:literal) => { MaybeInfiniteInt::new_finite_uint($int) };
(@rangeboundary -) => { MaybeInfiniteInt::NegInfinity };
(@rangeboundary +) => { MaybeInfiniteInt::PosInfinity };
// Parse subfields: `(..)` or `{..}`
// Constructor with no fields, e.g. `bool` or `Variant.1`.
(@pat($($args:tt)*) $(,)?) => {
pats!(@pat($($args)*) {})
};
(@pat($($args:tt)*) , $($rest:tt)*) => {
pats!(@pat($($args)*) {}, $($rest)*)
};
// `(..)` and `{..}` are treated the same.
(@pat($($args:tt)*) ( $($subpat:tt)* ) $($rest:tt)*) => {{
pats!(@pat($($args)*) { $($subpat)* } $($rest)*)
}};
(@pat(vec:$vec:expr, sub_tys:$sub_tys:expr, idx:$idx:expr, ctor:$ctor:expr) { $($fields:tt)* } $($rest:tt)*) => {{
let sub_tys = $sub_tys;
let index = $idx;
// Silly dance to work with both a vec and `iter::repeat()`.
let ty = *(&sub_tys).clone().into_iter().nth(index).unwrap();
let ctor = $ctor;
let ctor_sub_tys = &ty.sub_tys(&ctor);
#[allow(unused_mut)]
let mut fields = Vec::new();
// Parse subpatterns (note the leading comma).
pats!(@fields(idx:0, vec:fields, sub_tys:ctor_sub_tys) ,$($fields)*);
let arity = ctor_sub_tys.len();
let pat = DeconstructedPat::new(ctor, fields, arity, ty, ()).at_index(index);
$vec.push(pat);
// Continue parsing further patterns.
pats!(@fields(idx:index+1, vec:$vec, sub_tys:sub_tys) $($rest)*);
}};
// Parse fields one by one.
// No fields left.
(@fields($($args:tt)*) $(,)?) => {};
// `.i: pat` sets the current index to `i`.
(@fields(idx:$_idx:expr, $($args:tt)*) , .$idx:literal : $($rest:tt)*) => {{
pats!(@ctor($($args)*, idx:$idx) $($rest)*);
}};
(@fields(idx:$_idx:expr, $($args:tt)*) , .$idx:ident : $($rest:tt)*) => {{
pats!(@ctor($($args)*, idx:$idx) $($rest)*);
}};
// Field without an explicit index; we use the current index which gets incremented above.
(@fields(idx:$idx:expr, $($args:tt)*) , $($rest:tt)*) => {{
pats!(@ctor($($args)*, idx:$idx) $($rest)*);
}};
}
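To make the calling convention described above concrete, here is a hypothetical usage sketch. It assumes the `Ty`, `pat!`, and `pats!` items from this common module are in scope; the test and variable names are illustrative only, and the calls mirror the ones in the test files later in this rollup.

```rust
// Illustrative sketch only; assumes the `Ty`, `pat!`, and `pats!` definitions above.
#[test]
fn pats_macro_sketch() {
    // A single struct pattern, `BigStruct { .1: true, .. }`: field 1 is matched against `true`.
    let struct_ty = Ty::BigStruct { arity: 2, ty: &Ty::Bool };
    let _single = pat!(struct_ty; Struct { .1: true });

    // Three `u8` patterns at once: `0..=100`, the open-ended `100..`, and a wildcard.
    let many = pats!(Ty::U8; 0..=100, 100.., _);
    assert_eq!(many.len(), 3);
}
```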

View file

@ -0,0 +1,109 @@
//! Test the pattern complexity limit.
use common::*;
use rustc_pattern_analysis::{pat::DeconstructedPat, usefulness::PlaceValidity, MatchArm};
#[macro_use]
mod common;
/// Analyze a match made of these patterns. Ignore the report; we only care whether we exceeded the
/// limit or not.
fn check(patterns: &[DeconstructedPat<Cx>], complexity_limit: usize) -> Result<(), ()> {
let ty = *patterns[0].ty();
let arms: Vec<_> =
patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, Some(complexity_limit))
.map(|_report| ())
}
/// Asserts that analyzing this match takes exactly `complexity` steps.
#[track_caller]
fn assert_complexity(patterns: Vec<DeconstructedPat<Cx>>, complexity: usize) {
assert!(check(&patterns, complexity).is_ok());
assert!(check(&patterns, complexity - 1).is_err());
}
/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
/// BigStruct { field01: true, .. } => {}
/// BigStruct { field02: true, .. } => {}
/// BigStruct { field03: true, .. } => {}
/// BigStruct { field04: true, .. } => {}
/// ...
/// _ => {}
/// }
/// ```
fn diagonal_match(arity: usize) -> Vec<DeconstructedPat<Cx>> {
let struct_ty = Ty::BigStruct { arity, ty: &Ty::Bool };
let mut patterns = vec![];
for i in 0..arity {
patterns.push(pat!(struct_ty; Struct { .i: true }));
}
patterns.push(pat!(struct_ty; _));
patterns
}
/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
/// BigStruct { field01: true, .. } => {}
/// BigStruct { field02: true, .. } => {}
/// BigStruct { field03: true, .. } => {}
/// BigStruct { field04: true, .. } => {}
/// ...
/// BigStruct { field01: false, .. } => {}
/// BigStruct { field02: false, .. } => {}
/// BigStruct { field03: false, .. } => {}
/// BigStruct { field04: false, .. } => {}
/// ...
/// _ => {}
/// }
/// ```
fn diagonal_exponential_match(arity: usize) -> Vec<DeconstructedPat<Cx>> {
let struct_ty = Ty::BigStruct { arity, ty: &Ty::Bool };
let mut patterns = vec![];
for i in 0..arity {
patterns.push(pat!(struct_ty; Struct { .i: true }));
}
for i in 0..arity {
patterns.push(pat!(struct_ty; Struct { .i: false }));
}
patterns.push(pat!(struct_ty; _));
patterns
}
#[test]
fn test_diagonal_struct_match() {
    // These cases are nicely linear: we check `arity` patterns with exactly one `true`, matching
    // in 2 branches each, and a final pattern with all `false`, matching only the `_` branch,
    // for `2 * arity + 1` steps in total.
assert_complexity(diagonal_match(20), 41);
assert_complexity(diagonal_match(30), 61);
// This case goes exponential.
assert!(check(&diagonal_exponential_match(10), 10000).is_err());
}
/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
/// BigEnum::Variant1(_) => {}
/// BigEnum::Variant2(_) => {}
/// BigEnum::Variant3(_) => {}
/// ...
/// _ => {}
/// }
/// ```
fn big_enum(arity: usize) -> Vec<DeconstructedPat<Cx>> {
let enum_ty = Ty::BigEnum { arity, ty: &Ty::Bool };
let mut patterns = vec![];
for i in 0..arity {
patterns.push(pat!(enum_ty; Variant.i));
}
patterns.push(pat!(enum_ty; _));
patterns
}
#[test]
fn test_big_enum() {
    // We try 2 branches per variant, for `2 * arity` steps in total.
assert_complexity(big_enum(20), 40);
}

View file

@ -0,0 +1,77 @@
//! Test exhaustiveness checking.
use common::*;
use rustc_pattern_analysis::{
pat::{DeconstructedPat, WitnessPat},
usefulness::PlaceValidity,
MatchArm,
};
#[macro_use]
mod common;
/// Analyze a match made of these patterns.
fn check(patterns: Vec<DeconstructedPat<Cx>>) -> Vec<WitnessPat<Cx>> {
let ty = *patterns[0].ty();
let arms: Vec<_> =
patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
let report =
compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, None).unwrap();
report.non_exhaustiveness_witnesses
}
#[track_caller]
fn assert_exhaustive(patterns: Vec<DeconstructedPat<Cx>>) {
let witnesses = check(patterns);
if !witnesses.is_empty() {
panic!("non-exaustive match: missing {witnesses:?}");
}
}
#[track_caller]
fn assert_non_exhaustive(patterns: Vec<DeconstructedPat<Cx>>) {
let witnesses = check(patterns);
assert!(!witnesses.is_empty())
}
#[test]
fn test_int_ranges() {
let ty = Ty::U8;
assert_exhaustive(pats!(ty;
0..=255,
));
assert_exhaustive(pats!(ty;
0..,
));
assert_non_exhaustive(pats!(ty;
0..255,
));
assert_exhaustive(pats!(ty;
0..255,
255,
));
assert_exhaustive(pats!(ty;
..10,
10..
));
}
#[test]
fn test_nested() {
let ty = Ty::BigStruct { arity: 2, ty: &Ty::BigEnum { arity: 2, ty: &Ty::Bool } };
assert_non_exhaustive(pats!(ty;
Struct(Variant.0, _),
));
assert_exhaustive(pats!(ty;
Struct(Variant.0, _),
Struct(Variant.1, _),
));
assert_non_exhaustive(pats!(ty;
Struct(Variant.0, _),
Struct(_, Variant.0),
));
assert_exhaustive(pats!(ty;
Struct(Variant.0, _),
Struct(_, Variant.0),
Struct(Variant.1, Variant.1),
));
}

View file

@ -0,0 +1,69 @@
//! Test the computation of arm intersections.
use common::*;
use rustc_pattern_analysis::{pat::DeconstructedPat, usefulness::PlaceValidity, MatchArm};
#[macro_use]
mod common;
/// Analyze a match made of these patterns and return the computed arm intersections.
fn check(patterns: Vec<DeconstructedPat<Cx>>) -> Vec<Vec<usize>> {
let ty = *patterns[0].ty();
let arms: Vec<_> =
patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
let report =
compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, None).unwrap();
report.arm_intersections.into_iter().map(|bitset| bitset.iter().collect()).collect()
}
#[track_caller]
fn assert_intersects(patterns: Vec<DeconstructedPat<Cx>>, intersects: &[&[usize]]) {
let computed_intersects = check(patterns);
assert_eq!(computed_intersects, intersects);
}
#[test]
fn test_int_ranges() {
let ty = Ty::U8;
assert_intersects(
pats!(ty;
0..=100,
100..,
),
&[&[], &[0]],
);
assert_intersects(
pats!(ty;
0..=101,
100..,
),
&[&[], &[0]],
);
assert_intersects(
pats!(ty;
0..100,
100..,
),
&[&[], &[]],
);
}
#[test]
fn test_nested() {
let ty = Ty::Tuple(&[Ty::Bool; 2]);
assert_intersects(
pats!(ty;
(true, true),
(true, _),
(_, true),
),
&[&[], &[0], &[0, 1]],
);
// Here we shortcut because `(true, true)` is irrelevant, so we fail to detect the intersection.
assert_intersects(
pats!(ty;
(true, _),
(_, true),
),
&[&[], &[]],
);
}

View file

@ -1696,7 +1696,7 @@ fn check_mod_privacy(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
}
}
for id in module.items() {
for id in module.free_items() {
if let ItemKind::Impl(i) = tcx.hir().item(id).kind {
if let Some(item) = i.of_trait {
let trait_ref = tcx.impl_trait_ref(id.owner_id.def_id).unwrap();

View file

@ -1592,18 +1592,23 @@ fn smart_resolve_context_dependent_help(
match (res, source) {
(
Res::Def(DefKind::Macro(MacroKind::Bang), _),
Res::Def(DefKind::Macro(MacroKind::Bang), def_id),
PathSource::Expr(Some(Expr {
kind: ExprKind::Index(..) | ExprKind::Call(..), ..
}))
| PathSource::Struct,
) => {
// Don't suggest macro if it's unstable.
let suggestable = def_id.is_local()
|| self.r.tcx.lookup_stability(def_id).map_or(true, |s| s.is_stable());
err.span_label(span, fallback_label.to_string());
// Don't suggest `!` for a macro invocation if there are generic args
if path
.last()
.is_some_and(|segment| !segment.has_generic_args && !segment.has_lifetime_args)
&& suggestable
{
err.span_suggestion_verbose(
span.shrink_to_hi(),

View file

@ -22,7 +22,7 @@
use rustc_target::abi::Align;
use rustc_target::spec::LinkSelfContainedComponents;
use rustc_target::spec::{PanicStrategy, RelocModel, SanitizerSet, SplitDebuginfo};
use rustc_target::spec::{Target, TargetTriple, TargetWarnings, TARGETS};
use rustc_target::spec::{Target, TargetTriple, TARGETS};
use std::collections::btree_map::{
Iter as BTreeMapIter, Keys as BTreeMapKeysIter, Values as BTreeMapValuesIter,
};
@ -1549,34 +1549,25 @@ pub fn build_configuration(sess: &Session, mut user_cfg: Cfg) -> Cfg {
user_cfg
}
pub fn build_target_config(
early_dcx: &EarlyDiagCtxt,
opts: &Options,
target_override: Option<Target>,
sysroot: &Path,
) -> Target {
let target_result = target_override.map_or_else(
|| Target::search(&opts.target_triple, sysroot),
|t| Ok((t, TargetWarnings::empty())),
);
let (target, target_warnings) = target_result.unwrap_or_else(|e| {
early_dcx.early_fatal(format!(
pub fn build_target_config(early_dcx: &EarlyDiagCtxt, opts: &Options, sysroot: &Path) -> Target {
match Target::search(&opts.target_triple, sysroot) {
Ok((target, warnings)) => {
for warning in warnings.warning_messages() {
early_dcx.early_warn(warning)
}
if !matches!(target.pointer_width, 16 | 32 | 64) {
early_dcx.early_fatal(format!(
"target specification was invalid: unrecognized target-pointer-width {}",
target.pointer_width
))
}
target
}
Err(e) => early_dcx.early_fatal(format!(
"Error loading target specification: {e}. \
Run `rustc --print target-list` for a list of built-in targets"
))
});
for warning in target_warnings.warning_messages() {
early_dcx.early_warn(warning)
Run `rustc --print target-list` for a list of built-in targets"
)),
}
if !matches!(target.pointer_width, 16 | 32 | 64) {
early_dcx.early_fatal(format!(
"target specification was invalid: unrecognized target-pointer-width {}",
target.pointer_width
))
}
target
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]

View file

@ -1008,7 +1008,7 @@ pub fn build_session(
fluent_resources: Vec<&'static str>,
driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
file_loader: Option<Box<dyn FileLoader + Send + Sync + 'static>>,
target_cfg: Target,
target: Target,
sysroot: PathBuf,
cfg_version: &'static str,
ice_file: Option<PathBuf>,
@ -1036,7 +1036,7 @@ pub fn build_session(
let loader = file_loader.unwrap_or_else(|| Box::new(RealFileLoader));
let hash_kind = sopts.unstable_opts.src_hash_algorithm.unwrap_or_else(|| {
if target_cfg.is_like_msvc {
if target.is_like_msvc {
SourceFileHashAlgorithm::Sha256
} else {
SourceFileHashAlgorithm::Md5
@ -1117,11 +1117,10 @@ pub fn build_session(
_ => CtfeBacktrace::Disabled,
});
let asm_arch =
if target_cfg.allow_asm { InlineAsmArch::from_str(&target_cfg.arch).ok() } else { None };
let asm_arch = if target.allow_asm { InlineAsmArch::from_str(&target.arch).ok() } else { None };
let sess = Session {
target: target_cfg,
target,
host,
opts: sopts,
host_tlib_path,

View file

@ -25,7 +25,7 @@ pub fn report_symbol_names(tcx: TyCtxt<'_>) {
let mut symbol_names = SymbolNamesTest { tcx };
let crate_items = tcx.hir_crate_items(());
for id in crate_items.items() {
for id in crate_items.free_items() {
symbol_names.process_attrs(id.owner_id.def_id);
}

View file

@ -2092,6 +2092,9 @@ pub struct TargetOptions {
/// compiling `rustc` will be used instead (or llvm if it is not set).
///
/// N.B. when *using* the compiler, backend can always be overridden with `-Zcodegen-backend`.
///
/// This was added by WaffleLapkin in #116793. The motivation is a rustc fork that requires a
/// custom codegen backend for a particular target.
pub default_codegen_backend: Option<StaticCow<str>>,
/// Whether to generate trap instructions in places where optimization would

View file

@ -19,6 +19,9 @@ trait_selection_closure_kind_mismatch = expected a closure that implements the `
trait_selection_closure_kind_requirement = the requirement to implement `{$trait_prefix}{$expected}` derives from here
trait_selection_disallowed_positional_argument = positional format arguments are not allowed here
.help = only named format arguments with the name of one of the generic types are allowed in this context
trait_selection_dump_vtable_entries = vtable entries for `{$trait_ref}`: {$entries}
trait_selection_empty_on_clause_in_rustc_on_unimplemented = empty `on`-clause in `#[rustc_on_unimplemented]`
@ -30,6 +33,9 @@ trait_selection_ignored_diagnostic_option = `{$option_name}` is ignored due to p
trait_selection_inherent_projection_normalization_overflow = overflow evaluating associated type `{$ty}`
trait_selection_invalid_format_specifier = invalid format specifier
.help = no format specifier are supported in this position
trait_selection_invalid_on_clause_in_rustc_on_unimplemented = invalid `on`-clause in `#[rustc_on_unimplemented]`
.label = invalid on-clause here
@ -60,3 +66,6 @@ trait_selection_unable_to_construct_constant_value = unable to construct a const
trait_selection_unknown_format_parameter_for_on_unimplemented_attr = there is no parameter `{$argument_name}` on trait `{$trait_name}`
.help = expect either a generic argument name or {"`{Self}`"} as format argument
trait_selection_wrapped_parser_error = {$description}
.label = {$label}

View file

@ -367,6 +367,23 @@ pub struct UnknownFormatParameterForOnUnimplementedAttr {
trait_name: Symbol,
}
#[derive(LintDiagnostic)]
#[diag(trait_selection_disallowed_positional_argument)]
#[help]
pub struct DisallowedPositionalArgument;
#[derive(LintDiagnostic)]
#[diag(trait_selection_invalid_format_specifier)]
#[help]
pub struct InvalidFormatSpecifier;
#[derive(LintDiagnostic)]
#[diag(trait_selection_wrapped_parser_error)]
pub struct WrappedParserError {
description: String,
label: String,
}
impl<'tcx> OnUnimplementedDirective {
fn parse(
tcx: TyCtxt<'tcx>,
@ -758,64 +775,108 @@ fn verify(&self, tcx: TyCtxt<'tcx>, item_def_id: DefId) -> Result<(), ErrorGuara
let trait_name = tcx.item_name(trait_def_id);
let generics = tcx.generics_of(item_def_id);
let s = self.symbol.as_str();
let parser = Parser::new(s, None, None, false, ParseMode::Format);
let mut parser = Parser::new(s, None, None, false, ParseMode::Format);
let mut result = Ok(());
for token in parser {
for token in &mut parser {
match token {
Piece::String(_) => (), // Normal string, no need to check it
Piece::NextArgument(a) => match a.position {
Position::ArgumentNamed(s) => {
match Symbol::intern(s) {
// `{ThisTraitsName}` is allowed
s if s == trait_name && !self.is_diagnostic_namespace_variant => (),
s if ALLOWED_FORMAT_SYMBOLS.contains(&s)
&& !self.is_diagnostic_namespace_variant =>
{
()
}
// So is `{A}` if A is a type parameter
s if generics.params.iter().any(|param| param.name == s) => (),
s => {
if self.is_diagnostic_namespace_variant {
tcx.emit_node_span_lint(
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id.expect_local()),
self.span,
UnknownFormatParameterForOnUnimplementedAttr {
argument_name: s,
trait_name,
},
);
} else {
result = Err(struct_span_code_err!(
tcx.dcx(),
self.span,
E0230,
"there is no parameter `{}` on {}",
s,
if trait_def_id == item_def_id {
format!("trait `{trait_name}`")
} else {
"impl".to_string()
}
)
.emit());
Piece::NextArgument(a) => {
let format_spec = a.format;
if self.is_diagnostic_namespace_variant
&& (format_spec.ty_span.is_some()
|| format_spec.width_span.is_some()
|| format_spec.precision_span.is_some()
|| format_spec.fill_span.is_some())
{
tcx.emit_node_span_lint(
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id.expect_local()),
self.span,
InvalidFormatSpecifier,
);
}
match a.position {
Position::ArgumentNamed(s) => {
match Symbol::intern(s) {
// `{ThisTraitsName}` is allowed
s if s == trait_name && !self.is_diagnostic_namespace_variant => (),
s if ALLOWED_FORMAT_SYMBOLS.contains(&s)
&& !self.is_diagnostic_namespace_variant =>
{
()
}
// So is `{A}` if A is a type parameter
s if generics.params.iter().any(|param| param.name == s) => (),
s => {
if self.is_diagnostic_namespace_variant {
tcx.emit_node_span_lint(
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id.expect_local()),
self.span,
UnknownFormatParameterForOnUnimplementedAttr {
argument_name: s,
trait_name,
},
);
} else {
result = Err(struct_span_code_err!(
tcx.dcx(),
self.span,
E0230,
"there is no parameter `{}` on {}",
s,
if trait_def_id == item_def_id {
format!("trait `{trait_name}`")
} else {
"impl".to_string()
}
)
.emit());
}
}
}
}
// `{:1}` and `{}` are not to be used
Position::ArgumentIs(..) | Position::ArgumentImplicitlyIs(_) => {
if self.is_diagnostic_namespace_variant {
tcx.emit_node_span_lint(
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id.expect_local()),
self.span,
DisallowedPositionalArgument,
);
} else {
let reported = struct_span_code_err!(
tcx.dcx(),
self.span,
E0231,
"only named generic parameters are allowed"
)
.emit();
result = Err(reported);
}
}
}
// `{:1}` and `{}` are not to be used
Position::ArgumentIs(..) | Position::ArgumentImplicitlyIs(_) => {
let reported = struct_span_code_err!(
tcx.dcx(),
self.span,
E0231,
"only named generic parameters are allowed"
)
.emit();
result = Err(reported);
}
},
}
}
}
// We cannot return errors from processing the format string as a hard error here,
// as the diagnostic namespace guarantees that malformed input cannot cause an error.
//
// If we encounter any error while processing, we nevertheless want to show it as a warning
// so that users are aware that something is not correct.
for e in parser.errors {
if self.is_diagnostic_namespace_variant {
tcx.emit_node_span_lint(
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id.expect_local()),
self.span,
WrappedParserError { description: e.description, label: e.label },
);
} else {
let reported =
struct_span_code_err!(tcx.dcx(), self.span, E0231, "{}", e.description,).emit();
result = Err(reported);
}
}
@ -853,9 +914,9 @@ pub fn format(
let empty_string = String::new();
let s = self.symbol.as_str();
let parser = Parser::new(s, None, None, false, ParseMode::Format);
let mut parser = Parser::new(s, None, None, false, ParseMode::Format);
let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string);
parser
let constructed_message = (&mut parser)
.map(|p| match p {
Piece::String(s) => s.to_owned(),
Piece::NextArgument(a) => match a.position {
@ -895,9 +956,29 @@ pub fn format(
}
}
}
Position::ArgumentImplicitlyIs(_) if self.is_diagnostic_namespace_variant => {
String::from("{}")
}
Position::ArgumentIs(idx) if self.is_diagnostic_namespace_variant => {
format!("{{{idx}}}")
}
_ => bug!("broken on_unimplemented {:?} - bad format arg", self.symbol),
},
})
.collect()
.collect();
// We cannot return errors from processing the format string as a hard error here,
// as the diagnostic namespace guarantees that malformed input cannot cause an error.
//
// If we encounter any error while processing the format string,
// we don't want to show a potentially half-assembled formatted string,
// so we fall back to just showing the input string in this case.
//
// The actual parser errors are emitted earlier
// as lint warnings in OnUnimplementedFormatString::verify.
if self.is_diagnostic_namespace_variant && !parser.errors.is_empty() {
String::from(s)
} else {
constructed_message
}
}
}
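As a condensed illustration of the fallback described above, the following compile-fail sketch mirrors the `ImportantTrait5` case from the new `broken_format.rs` UI test later in this rollup: the malformed specifier only triggers `unknown_or_malformed_diagnostic_attributes` warnings, and the trait error message falls back to the raw input string.

```rust
// The broken format string produces lint warnings, but the E0277 message shows the input verbatim.
#[diagnostic::on_unimplemented(message = "Test {Self:!}")]
trait ImportantTrait5 {}

fn check_5(_: impl ImportantTrait5) {}

fn main() {
    check_5(()); // error[E0277]: Test {Self:!}
}
```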

View file

@ -1714,6 +1714,18 @@ macro_rules! trace_macros {
builtin # type_ascribe($expr, $ty)
}
#[cfg(not(bootstrap))]
/// Unstable placeholder for deref patterns.
#[allow_internal_unstable(builtin_syntax)]
#[unstable(
feature = "deref_patterns",
issue = "87121",
reason = "placeholder syntax for deref patterns"
)]
pub macro deref($pat:pat) {
builtin # deref($pat)
}
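For context, here is a minimal sketch of how this placeholder syntax is used, mirroring the updated UI test later in this rollup. It assumes a nightly compiler; `deref_patterns` is an incomplete feature, so the extra `allow` may be needed.

```rust
// Minimal nightly-only sketch; `deref_patterns` is an incomplete placeholder feature.
#![feature(deref_patterns)]
#![allow(incomplete_features)]

fn main() {
    // `deref!(pat)` matches through the `Deref` impl, replacing the previous `box pat` placeholder
    // (see the left-hand side of the updated test diff below).
    let vec: Vec<u32> = Vec::new();
    match vec {
        deref!([..]) => {}
        _ => {}
    }
    match Box::new(true) {
        deref!(true) => {}
        _ => {}
    }
}
```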
/// Unstable implementation detail of the `rustc` compiler, do not use.
#[rustc_builtin_macro]
#[stable(feature = "rust1", since = "1.0.0")]

View file

@ -103,3 +103,11 @@
reason = "placeholder syntax for type ascription"
)]
pub use crate::macros::builtin::type_ascribe;
#[cfg(not(bootstrap))]
#[unstable(
feature = "deref_patterns",
issue = "87121",
reason = "placeholder syntax for deref patterns"
)]
pub use crate::macros::builtin::deref;

View file

@ -91,6 +91,15 @@
)]
pub use core::prelude::v1::type_ascribe;
#[cfg(not(bootstrap))]
// Do not `doc(no_inline)` either.
#[unstable(
feature = "deref_patterns",
issue = "87121",
reason = "placeholder syntax for deref patterns"
)]
pub use core::prelude::v1::deref;
// The file so far is equivalent to core/src/prelude/v1.rs. It is duplicated
// rather than glob imported because we want docs to show these re-exports as
// pointing to within `std`.

View file

@ -329,6 +329,7 @@ pub(crate) fn name_from_pat(p: &hir::Pat<'_>) -> Symbol {
elts.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(", ")
),
PatKind::Box(p) => return name_from_pat(&*p),
PatKind::Deref(p) => format!("deref!({})", name_from_pat(&*p)),
PatKind::Ref(p, _) => return name_from_pat(&*p),
PatKind::Lit(..) => {
warn!(

View file

@ -55,7 +55,7 @@ fn array_rec(pats: &[Pat<'_>]) -> bool {
| PatKind::Err(_) => false,
PatKind::Struct(_, a, etc) => !etc && a.iter().all(|x| unary_pattern(x.pat)),
PatKind::Tuple(a, etc) | PatKind::TupleStruct(_, a, etc) => etc.as_opt_usize().is_none() && array_rec(a),
PatKind::Ref(x, _) | PatKind::Box(x) => unary_pattern(x),
PatKind::Ref(x, _) | PatKind::Box(x) | PatKind::Deref(x) => unary_pattern(x),
PatKind::Path(_) | PatKind::Lit(_) => true,
}
}

View file

@ -243,7 +243,7 @@ impl<'a> NormalizedPat<'a> {
fn from_pat(cx: &LateContext<'_>, arena: &'a DroplessArena, pat: &'a Pat<'_>) -> Self {
match pat.kind {
PatKind::Wild | PatKind::Binding(.., None) => Self::Wild,
PatKind::Binding(.., Some(pat)) | PatKind::Box(pat) | PatKind::Ref(pat, _) => {
PatKind::Binding(.., Some(pat)) | PatKind::Box(pat) | PatKind::Deref(pat) | PatKind::Ref(pat, _) => {
Self::from_pat(cx, arena, pat)
},
PatKind::Never => Self::Never,

View file

@ -242,6 +242,8 @@ fn transform_with_focus_on_idx(alternatives: &mut ThinVec<P<Pat>>, focus_idx: us
|k| matches!(k, Box(_)),
|k| always_pat!(k, Box(p) => p),
),
// FIXME(deref_patterns): Should we merge patterns here?
Deref(_) => false,
// Transform `&mut x | ... | &mut y` into `&mut (x | y)`.
Ref(target, Mutability::Mut) => extend_with_matching(
target, start, alternatives,

View file

@ -689,6 +689,11 @@ macro_rules! kind {
kind!("Box({pat})");
self.pat(pat);
},
PatKind::Deref(pat) => {
bind!(self, pat);
kind!("Deref({pat})");
self.pat(pat);
},
PatKind::Ref(pat, muta) => {
bind!(self, pat);
kind!("Ref({pat}, Mutability::{muta:?})");

View file

@ -955,6 +955,7 @@ pub fn hash_pat(&mut self, pat: &Pat<'_>) {
}
},
PatKind::Box(pat) => self.hash_pat(pat),
PatKind::Deref(pat) => self.hash_pat(pat),
PatKind::Lit(expr) => self.hash_expr(expr),
PatKind::Or(pats) => {
for pat in pats {

View file

@ -1678,7 +1678,7 @@ fn are_refutable<'a, I: IntoIterator<Item = &'a Pat<'a>>>(cx: &LateContext<'_>,
match pat.kind {
PatKind::Wild | PatKind::Never => false, // If `!` typechecked then the type is empty, so not refutable.
PatKind::Binding(_, _, _, pat) => pat.map_or(false, |pat| is_refutable(cx, pat)),
PatKind::Box(pat) | PatKind::Ref(pat, _) => is_refutable(cx, pat),
PatKind::Box(pat) | PatKind::Deref(pat) | PatKind::Ref(pat, _) => is_refutable(cx, pat),
PatKind::Path(ref qpath) => is_enum_variant(cx, qpath, pat.hir_id),
PatKind::Or(pats) => {
// TODO: should be the honest check, that pats is exhaustive set

View file

@ -55,9 +55,10 @@ fn is_short_pattern_inner(pat: &ast::Pat) -> bool {
ast::PatKind::TupleStruct(_, ref path, ref subpats) => {
path.segments.len() <= 1 && subpats.len() <= 1
}
ast::PatKind::Box(ref p) | ast::PatKind::Ref(ref p, _) | ast::PatKind::Paren(ref p) => {
is_short_pattern_inner(&*p)
}
ast::PatKind::Box(ref p)
| PatKind::Deref(ref p)
| ast::PatKind::Ref(ref p, _)
| ast::PatKind::Paren(ref p) => is_short_pattern_inner(&*p),
PatKind::Or(ref pats) => pats.iter().all(|p| is_short_pattern_inner(p)),
}
}
@ -277,6 +278,7 @@ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String>
.rewrite(context, shape.offset_left(1)?.sub_width(1)?)
.map(|inner_pat| format!("({})", inner_pat)),
PatKind::Err(_) => None,
PatKind::Deref(_) => None,
}
}
}

View file

@ -68,7 +68,7 @@ fn after_analysis<'tcx>(
let mut bodies = Vec::new();
let crate_items = tcx.hir_crate_items(());
for id in crate_items.items() {
for id in crate_items.free_items() {
if matches!(tcx.def_kind(id.owner_id), DefKind::Fn) {
bodies.push(id.owner_id);
}

View file

@ -0,0 +1,45 @@
#[diagnostic::on_unimplemented(message = "{{Test } thing")]
//~^WARN unmatched `}` found
//~|WARN unmatched `}` found
trait ImportantTrait1 {}
#[diagnostic::on_unimplemented(message = "Test {}")]
//~^WARN positional format arguments are not allowed here
//~|WARN positional format arguments are not allowed here
trait ImportantTrait2 {}
#[diagnostic::on_unimplemented(message = "Test {1:}")]
//~^WARN positional format arguments are not allowed here
//~|WARN positional format arguments are not allowed here
trait ImportantTrait3 {}
#[diagnostic::on_unimplemented(message = "Test {Self:123}")]
//~^WARN invalid format specifier
//~|WARN invalid format specifier
trait ImportantTrait4 {}
#[diagnostic::on_unimplemented(message = "Test {Self:!}")]
//~^WARN expected `'}'`, found `'!'`
//~|WARN expected `'}'`, found `'!'`
//~|WARN unmatched `}` found
//~|WARN unmatched `}` found
trait ImportantTrait5 {}
fn check_1(_: impl ImportantTrait1) {}
fn check_2(_: impl ImportantTrait2) {}
fn check_3(_: impl ImportantTrait3) {}
fn check_4(_: impl ImportantTrait4) {}
fn check_5(_: impl ImportantTrait5) {}
fn main() {
check_1(());
//~^ERROR {{Test } thing
check_2(());
//~^ERROR Test {}
check_3(());
//~^ERROR Test {1}
check_4(());
//~^ERROR Test ()
check_5(());
//~^ERROR Test {Self:!}
}

View file

@ -0,0 +1,193 @@
warning: unmatched `}` found
--> $DIR/broken_format.rs:1:32
|
LL | #[diagnostic::on_unimplemented(message = "{{Test } thing")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: `#[warn(unknown_or_malformed_diagnostic_attributes)]` on by default
warning: positional format arguments are not allowed here
--> $DIR/broken_format.rs:6:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {}")]
| ^^^^^^^^^^^^^^^^^^^
|
= help: only named format arguments with the name of one of the generic types are allowed in this context
warning: positional format arguments are not allowed here
--> $DIR/broken_format.rs:11:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {1:}")]
| ^^^^^^^^^^^^^^^^^^^^^
|
= help: only named format arguments with the name of one of the generic types are allowed in this context
warning: invalid format specifier
--> $DIR/broken_format.rs:16:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:123}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: no format specifier are supported in this position
warning: expected `'}'`, found `'!'`
--> $DIR/broken_format.rs:21:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:!}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^
warning: unmatched `}` found
--> $DIR/broken_format.rs:21:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:!}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^
warning: unmatched `}` found
--> $DIR/broken_format.rs:1:32
|
LL | #[diagnostic::on_unimplemented(message = "{{Test } thing")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error[E0277]: {{Test } thing
--> $DIR/broken_format.rs:35:13
|
LL | check_1(());
| ------- ^^ the trait `ImportantTrait1` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/broken_format.rs:4:1
|
LL | trait ImportantTrait1 {}
| ^^^^^^^^^^^^^^^^^^^^^
note: required by a bound in `check_1`
--> $DIR/broken_format.rs:28:20
|
LL | fn check_1(_: impl ImportantTrait1) {}
| ^^^^^^^^^^^^^^^ required by this bound in `check_1`
warning: positional format arguments are not allowed here
--> $DIR/broken_format.rs:6:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {}")]
| ^^^^^^^^^^^^^^^^^^^
|
= help: only named format arguments with the name of one of the generic types are allowed in this context
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error[E0277]: Test {}
--> $DIR/broken_format.rs:37:13
|
LL | check_2(());
| ------- ^^ the trait `ImportantTrait2` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/broken_format.rs:9:1
|
LL | trait ImportantTrait2 {}
| ^^^^^^^^^^^^^^^^^^^^^
note: required by a bound in `check_2`
--> $DIR/broken_format.rs:29:20
|
LL | fn check_2(_: impl ImportantTrait2) {}
| ^^^^^^^^^^^^^^^ required by this bound in `check_2`
warning: positional format arguments are not allowed here
--> $DIR/broken_format.rs:11:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {1:}")]
| ^^^^^^^^^^^^^^^^^^^^^
|
= help: only named format arguments with the name of one of the generic types are allowed in this context
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error[E0277]: Test {1}
--> $DIR/broken_format.rs:39:13
|
LL | check_3(());
| ------- ^^ the trait `ImportantTrait3` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/broken_format.rs:14:1
|
LL | trait ImportantTrait3 {}
| ^^^^^^^^^^^^^^^^^^^^^
note: required by a bound in `check_3`
--> $DIR/broken_format.rs:30:20
|
LL | fn check_3(_: impl ImportantTrait3) {}
| ^^^^^^^^^^^^^^^ required by this bound in `check_3`
warning: invalid format specifier
--> $DIR/broken_format.rs:16:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:123}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: no format specifier are supported in this position
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error[E0277]: Test ()
--> $DIR/broken_format.rs:41:13
|
LL | check_4(());
| ------- ^^ the trait `ImportantTrait4` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/broken_format.rs:19:1
|
LL | trait ImportantTrait4 {}
| ^^^^^^^^^^^^^^^^^^^^^
note: required by a bound in `check_4`
--> $DIR/broken_format.rs:31:20
|
LL | fn check_4(_: impl ImportantTrait4) {}
| ^^^^^^^^^^^^^^^ required by this bound in `check_4`
warning: expected `'}'`, found `'!'`
--> $DIR/broken_format.rs:21:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:!}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
warning: unmatched `}` found
--> $DIR/broken_format.rs:21:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:!}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error[E0277]: Test {Self:!}
--> $DIR/broken_format.rs:43:13
|
LL | check_5(());
| ------- ^^ the trait `ImportantTrait5` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/broken_format.rs:26:1
|
LL | trait ImportantTrait5 {}
| ^^^^^^^^^^^^^^^^^^^^^
note: required by a bound in `check_5`
--> $DIR/broken_format.rs:32:20
|
LL | fn check_5(_: impl ImportantTrait5) {}
| ^^^^^^^^^^^^^^^ required by this bound in `check_5`
error: aborting due to 5 previous errors; 12 warnings emitted
For more information about this error, try `rustc --explain E0277`.

View file

@ -0,0 +1,12 @@
macro_rules! mac {
($attr_item: meta) => {
#[cfg($attr_item)]
//~^ ERROR expected unsuffixed literal or identifier, found `an(arbitrary token stream)`
//~| ERROR expected unsuffixed literal or identifier, found `an(arbitrary token stream)`
struct S;
}
}
mac!(an(arbitrary token stream));
fn main() {}

View file

@ -0,0 +1,25 @@
error: expected unsuffixed literal or identifier, found `an(arbitrary token stream)`
--> $DIR/attr-bad-meta-4.rs:3:15
|
LL | #[cfg($attr_item)]
| ^^^^^^^^^^
...
LL | mac!(an(arbitrary token stream));
| -------------------------------- in this macro invocation
|
= note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info)
error: expected unsuffixed literal or identifier, found `an(arbitrary token stream)`
--> $DIR/attr-bad-meta-4.rs:3:15
|
LL | #[cfg($attr_item)]
| ^^^^^^^^^^
...
LL | mac!(an(arbitrary token stream));
| -------------------------------- in this macro invocation
|
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
= note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 2 previous errors

View file

@ -7,19 +7,19 @@
fn main() {
let vec: Vec<u32> = Vec::new();
match vec {
box [..] => {}
deref!([..]) => {}
_ => {}
}
match Box::new(true) {
box true => {}
deref!(true) => {}
_ => {}
}
match &Box::new(true) {
box true => {}
deref!(true) => {}
_ => {}
}
match &Rc::new(0) {
box (1..) => {}
deref!(1..) => {}
_ => {}
}
// FIXME(deref_patterns): fails to typecheck because `"foo"` has type &str but deref creates a

View file

@ -0,0 +1,25 @@
//! This test used to ICE because, while an error was emitted,
//! we still tried to remap generic params used in the hidden type
//! to the ones of the opaque type definition.
//@ edition: 2021
#![feature(type_alias_impl_trait)]
use std::future::Future;
type FutNothing<'a> = impl 'a + Future<Output = ()>;
//~^ ERROR: unconstrained opaque type
async fn operation(_: &mut ()) -> () {
//~^ ERROR: concrete type differs from previous
call(operation).await
}
async fn call<F>(_f: F)
where
for<'any> F: FnMut(&'any mut ()) -> FutNothing<'any>,
{
//~^ ERROR: expected generic lifetime parameter, found `'any`
}
fn main() {}

View file

@ -0,0 +1,34 @@
error: unconstrained opaque type
--> $DIR/hkl_forbidden4.rs:10:23
|
LL | type FutNothing<'a> = impl 'a + Future<Output = ()>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: `FutNothing` must be used in combination with a concrete type within the same module
error: concrete type differs from previous defining opaque type use
--> $DIR/hkl_forbidden4.rs:13:1
|
LL | async fn operation(_: &mut ()) -> () {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `FutNothing<'_>`, got `{async fn body@$DIR/hkl_forbidden4.rs:13:38: 16:2}`
|
note: previous use here
--> $DIR/hkl_forbidden4.rs:15:5
|
LL | call(operation).await
| ^^^^^^^^^^^^^^^
error[E0792]: expected generic lifetime parameter, found `'any`
--> $DIR/hkl_forbidden4.rs:21:1
|
LL | type FutNothing<'a> = impl 'a + Future<Output = ()>;
| -- this generic parameter must be used with a generic lifetime parameter
...
LL | / {
LL | |
LL | | }
| |_^
error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0792`.

View file

@ -26,11 +26,11 @@ note: closure parameter defined here
LL | let mut closure = expect_sig(|p, y| *p = y);
| ^
error[E0425]: cannot find function `deref` in this scope
error[E0423]: expected function, found macro `deref`
--> $DIR/unboxed-closures-type-mismatch-closure-from-another-scope.rs:13:5
|
LL | deref(p);
| ^^^^^ not found in this scope
| ^^^^^ not a function
|
help: use the `.` operator to call the method `Deref::deref` on `&&()`
|
@ -40,5 +40,5 @@ LL + p.deref();
error: aborting due to 4 previous errors
Some errors have detailed explanations: E0308, E0425.
Some errors have detailed explanations: E0308, E0423, E0425.
For more information about an error, try `rustc --explain E0308`.