Auto merge of #120112 - matthiaskrgr:rollup-48o3919, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - #119582 (bootstrap: handle vendored sources when remapping crate paths)
 - #119730 (docs: fix typos)
 - #119828 (Improved collapse_debuginfo attribute, added command-line flag)
 - #119869 (replace `track_errors` usages with bubbling up `ErrorGuaranteed`)
 - #120037 (Remove `next_root_ty_var`)
 - #120094 (tests/ui/asm/inline-syntax: adapt for LLVM 18)
 - #120096 (Set RUSTC_BOOTSTRAP=1 consistently)
 - #120101 (change `.unwrap()` to `?` on write where `fmt::Result` is returned)
 - #120102 (Fix typo in munmap_partial.rs)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2024-01-19 08:42:17 +00:00
commit 92d727796b
61 changed files with 767 additions and 251 deletions

View file

@ -16,6 +16,9 @@ expand_attributes_wrong_form =
expand_cannot_be_name_of_macro =
`{$trait_ident}` cannot be a name of {$macro_type} macro
expand_collapse_debuginfo_illegal =
illegal value for attribute #[collapse_debuginfo(no|external|yes)]
expand_count_repetition_misplaced =
`count` can not be placed inside the inner-most repetition

View file

@ -1,5 +1,6 @@
#![deny(rustc::untranslatable_diagnostic)]
use crate::base::ast::NestedMetaItem;
use crate::errors;
use crate::expand::{self, AstFragment, Invocation};
use crate::module::DirOwnership;
@ -19,6 +20,7 @@
use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT;
use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiagnostics, RegisteredTools};
use rustc_parse::{parser, MACRO_ARGUMENTS};
use rustc_session::config::CollapseMacroDebuginfo;
use rustc_session::errors::report_lit_error;
use rustc_session::{parse::ParseSess, Limit, Session};
use rustc_span::def_id::{CrateNum, DefId, LocalDefId};
@ -761,6 +763,55 @@ pub fn default(kind: SyntaxExtensionKind, edition: Edition) -> SyntaxExtension {
}
}
fn collapse_debuginfo_by_name(sess: &Session, attr: &Attribute) -> CollapseMacroDebuginfo {
use crate::errors::CollapseMacroDebuginfoIllegal;
// A bare #[collapse_debuginfo], i.e. without one of the values in
// #[collapse_debuginfo(no/external/yes)], is treated as `yes`
attr.meta_item_list().map_or(CollapseMacroDebuginfo::Yes, |l| {
let [NestedMetaItem::MetaItem(item)] = &l[..] else {
sess.dcx().emit_err(CollapseMacroDebuginfoIllegal { span: attr.span });
return CollapseMacroDebuginfo::Unspecified;
};
if !item.is_word() {
sess.dcx().emit_err(CollapseMacroDebuginfoIllegal { span: item.span });
CollapseMacroDebuginfo::Unspecified
} else {
match item.name_or_empty() {
sym::no => CollapseMacroDebuginfo::No,
sym::external => CollapseMacroDebuginfo::External,
sym::yes => CollapseMacroDebuginfo::Yes,
_ => {
sess.dcx().emit_err(CollapseMacroDebuginfoIllegal { span: item.span });
CollapseMacroDebuginfo::Unspecified
}
}
}
})
}
/// if-ext - collapse only if the macro comes from a different crate than the call site
/// | cmd \ attr    | no  | (unspecified) | external | yes |
/// | no            | no  | no            | no       | no  |
/// | (unspecified) | no  | no            | if-ext   | yes |
/// | external      | no  | if-ext        | if-ext   | yes |
/// | yes           | yes | yes           | yes      | yes |
fn get_collapse_debuginfo(sess: &Session, attrs: &[ast::Attribute], is_local: bool) -> bool {
let collapse_debuginfo_attr = attr::find_by_name(attrs, sym::collapse_debuginfo)
.map(|v| Self::collapse_debuginfo_by_name(sess, v))
.unwrap_or(CollapseMacroDebuginfo::Unspecified);
let flag = sess.opts.unstable_opts.collapse_macro_debuginfo;
let attr = collapse_debuginfo_attr;
let ext = !is_local;
#[rustfmt::skip]
let collapse_table = [
[false, false, false, false],
[false, false, ext, true],
[false, ext, ext, true],
[true, true, true, true],
];
collapse_table[flag as usize][attr as usize]
}
/// Constructs a syntax extension with the given properties
/// and other properties converted from attributes.
pub fn new(
@ -772,6 +823,7 @@ pub fn new(
edition: Edition,
name: Symbol,
attrs: &[ast::Attribute],
is_local: bool,
) -> SyntaxExtension {
let allow_internal_unstable =
attr::allow_internal_unstable(sess, attrs).collect::<Vec<Symbol>>();
@ -780,8 +832,8 @@ pub fn new(
let local_inner_macros = attr::find_by_name(attrs, sym::macro_export)
.and_then(|macro_export| macro_export.meta_item_list())
.is_some_and(|l| attr::list_contains_name(&l, sym::local_inner_macros));
let collapse_debuginfo = attr::contains_name(attrs, sym::collapse_debuginfo);
tracing::debug!(?local_inner_macros, ?collapse_debuginfo, ?allow_internal_unsafe);
let collapse_debuginfo = Self::get_collapse_debuginfo(sess, attrs, is_local);
tracing::debug!(?name, ?local_inner_macros, ?collapse_debuginfo, ?allow_internal_unsafe);
let (builtin_name, helper_attrs) = attr::find_by_name(attrs, sym::rustc_builtin_macro)
.map(|attr| {
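
The hunks above implement #119828: `#[collapse_debuginfo]` now takes an optional value (`no`, `external`, `yes`) and is combined with the new `-Z collapse_macro_debuginfo` flag according to the table in the doc comment. A hedged sketch of the user-facing side (the macro and program are invented for illustration; this needs a nightly compiler, and the feature-gate name is taken from the `experimental!(collapse_debuginfo)` entry in this diff):

```rust
#![feature(collapse_debuginfo)]

// Debug locations for code expanded from this macro collapse to the macro
// call site, so a debugger steps over the whole expansion as one line.
#[collapse_debuginfo(yes)]
macro_rules! checked_div {
    ($a:expr, $b:expr) => {
        if $b == 0 { panic!("division by zero") } else { $a / $b }
    };
}

fn main() {
    // With `external` (or no value on the flag/attribute) the behaviour also
    // depends on whether the macro is defined in another crate; see the
    // cmd/attr table above.
    println!("{}", checked_div!(10, 2));
}
```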

View file

@ -58,6 +58,13 @@ pub(crate) struct ResolveRelativePath {
pub path: String,
}
#[derive(Diagnostic)]
#[diag(expand_collapse_debuginfo_illegal)]
pub(crate) struct CollapseMacroDebuginfoIllegal {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(expand_macro_const_stability)]
pub(crate) struct MacroConstStability {

View file

@ -367,6 +367,7 @@ pub fn compile_declarative_macro(
edition,
def.ident.name,
&def.attrs,
def.id != DUMMY_NODE_ID,
)
};
let dummy_syn_ext = || (mk_syn_ext(Box::new(macro_rules_dummy_expander)), Vec::new());

View file

@ -469,7 +469,7 @@ pub struct BuiltinAttribute {
// `#[collapse_debuginfo]`
gated!(
collapse_debuginfo, Normal, template!(Word), WarnFollowing,
collapse_debuginfo, Normal, template!(Word, List: "no|external|yes"), ErrorFollowing,
experimental!(collapse_debuginfo)
),

View file

@ -1446,7 +1446,7 @@ fn lookup_inherent_assoc_ty(
}
let candidates: Vec<_> = tcx
.inherent_impls(adt_did)
.inherent_impls(adt_did)?
.iter()
.filter_map(|&impl_| Some((impl_, self.lookup_assoc_ty_unchecked(name, block, impl_)?)))
.collect();

View file

@ -13,32 +13,41 @@
use rustc_middle::ty::fast_reject::{simplify_type, SimplifiedType, TreatParams};
use rustc_middle::ty::{self, CrateInherentImpls, Ty, TyCtxt};
use rustc_span::symbol::sym;
use rustc_span::ErrorGuaranteed;
use crate::errors;
/// On-demand query: yields a map containing all types mapped to their inherent impls.
pub fn crate_inherent_impls(tcx: TyCtxt<'_>, (): ()) -> CrateInherentImpls {
pub fn crate_inherent_impls(
tcx: TyCtxt<'_>,
(): (),
) -> Result<&'_ CrateInherentImpls, ErrorGuaranteed> {
let mut collect = InherentCollect { tcx, impls_map: Default::default() };
let mut res = Ok(());
for id in tcx.hir().items() {
collect.check_item(id);
res = res.and(collect.check_item(id));
}
collect.impls_map
res?;
Ok(tcx.arena.alloc(collect.impls_map))
}
pub fn crate_incoherent_impls(tcx: TyCtxt<'_>, simp: SimplifiedType) -> &[DefId] {
let crate_map = tcx.crate_inherent_impls(());
tcx.arena.alloc_from_iter(
pub fn crate_incoherent_impls(
tcx: TyCtxt<'_>,
simp: SimplifiedType,
) -> Result<&[DefId], ErrorGuaranteed> {
let crate_map = tcx.crate_inherent_impls(())?;
Ok(tcx.arena.alloc_from_iter(
crate_map.incoherent_impls.get(&simp).unwrap_or(&Vec::new()).iter().map(|d| d.to_def_id()),
)
))
}
/// On-demand query: yields a vector of the inherent impls for a specific type.
pub fn inherent_impls(tcx: TyCtxt<'_>, ty_def_id: LocalDefId) -> &[DefId] {
let crate_map = tcx.crate_inherent_impls(());
match crate_map.inherent_impls.get(&ty_def_id) {
pub fn inherent_impls(tcx: TyCtxt<'_>, ty_def_id: LocalDefId) -> Result<&[DefId], ErrorGuaranteed> {
let crate_map = tcx.crate_inherent_impls(())?;
Ok(match crate_map.inherent_impls.get(&ty_def_id) {
Some(v) => &v[..],
None => &[],
}
})
}
struct InherentCollect<'tcx> {
@ -47,14 +56,19 @@ struct InherentCollect<'tcx> {
}
impl<'tcx> InherentCollect<'tcx> {
fn check_def_id(&mut self, impl_def_id: LocalDefId, self_ty: Ty<'tcx>, ty_def_id: DefId) {
fn check_def_id(
&mut self,
impl_def_id: LocalDefId,
self_ty: Ty<'tcx>,
ty_def_id: DefId,
) -> Result<(), ErrorGuaranteed> {
if let Some(ty_def_id) = ty_def_id.as_local() {
// Add the implementation to the mapping from implementation to base
// type def ID, if there is a base type for this implementation and
// the implementation does not have any associated traits.
let vec = self.impls_map.inherent_impls.entry(ty_def_id).or_default();
vec.push(impl_def_id.to_def_id());
return;
return Ok(());
}
if self.tcx.features().rustc_attrs {
@ -62,18 +76,16 @@ fn check_def_id(&mut self, impl_def_id: LocalDefId, self_ty: Ty<'tcx>, ty_def_id: DefId) {
if !self.tcx.has_attr(ty_def_id, sym::rustc_has_incoherent_inherent_impls) {
let impl_span = self.tcx.def_span(impl_def_id);
self.tcx.dcx().emit_err(errors::InherentTyOutside { span: impl_span });
return;
return Err(self.tcx.dcx().emit_err(errors::InherentTyOutside { span: impl_span }));
}
for &impl_item in items {
if !self.tcx.has_attr(impl_item, sym::rustc_allow_incoherent_impl) {
let impl_span = self.tcx.def_span(impl_def_id);
self.tcx.dcx().emit_err(errors::InherentTyOutsideRelevant {
return Err(self.tcx.dcx().emit_err(errors::InherentTyOutsideRelevant {
span: impl_span,
help_span: self.tcx.def_span(impl_item),
});
return;
}));
}
}
@ -82,24 +94,28 @@ fn check_def_id(&mut self, impl_def_id: LocalDefId, self_ty: Ty<'tcx>, ty_def_id: DefId) {
} else {
bug!("unexpected self type: {:?}", self_ty);
}
Ok(())
} else {
let impl_span = self.tcx.def_span(impl_def_id);
self.tcx.dcx().emit_err(errors::InherentTyOutsideNew { span: impl_span });
Err(self.tcx.dcx().emit_err(errors::InherentTyOutsideNew { span: impl_span }))
}
}
fn check_primitive_impl(&mut self, impl_def_id: LocalDefId, ty: Ty<'tcx>) {
fn check_primitive_impl(
&mut self,
impl_def_id: LocalDefId,
ty: Ty<'tcx>,
) -> Result<(), ErrorGuaranteed> {
let items = self.tcx.associated_item_def_ids(impl_def_id);
if !self.tcx.hir().rustc_coherence_is_core() {
if self.tcx.features().rustc_attrs {
for &impl_item in items {
if !self.tcx.has_attr(impl_item, sym::rustc_allow_incoherent_impl) {
let span = self.tcx.def_span(impl_def_id);
self.tcx.dcx().emit_err(errors::InherentTyOutsidePrimitive {
return Err(self.tcx.dcx().emit_err(errors::InherentTyOutsidePrimitive {
span,
help_span: self.tcx.def_span(impl_item),
});
return;
}));
}
}
} else {
@ -108,8 +124,7 @@ fn check_primitive_impl(&mut self, impl_def_id: LocalDefId, ty: Ty<'tcx>) {
if let ty::Ref(_, subty, _) = ty.kind() {
note = Some(errors::InherentPrimitiveTyNote { subty: *subty });
}
self.tcx.dcx().emit_err(errors::InherentPrimitiveTy { span, note });
return;
return Err(self.tcx.dcx().emit_err(errors::InherentPrimitiveTy { span, note }));
}
}
@ -118,11 +133,12 @@ fn check_primitive_impl(&mut self, impl_def_id: LocalDefId, ty: Ty<'tcx>) {
} else {
bug!("unexpected primitive type: {:?}", ty);
}
Ok(())
}
fn check_item(&mut self, id: hir::ItemId) {
fn check_item(&mut self, id: hir::ItemId) -> Result<(), ErrorGuaranteed> {
if !matches!(self.tcx.def_kind(id.owner_id), DefKind::Impl { of_trait: false }) {
return;
return Ok(());
}
let id = id.owner_id.def_id;
@ -132,10 +148,10 @@ fn check_item(&mut self, id: hir::ItemId) {
ty::Adt(def, _) => self.check_def_id(id, self_ty, def.did()),
ty::Foreign(did) => self.check_def_id(id, self_ty, did),
ty::Dynamic(data, ..) if data.principal_def_id().is_some() => {
self.check_def_id(id, self_ty, data.principal_def_id().unwrap());
self.check_def_id(id, self_ty, data.principal_def_id().unwrap())
}
ty::Dynamic(..) => {
self.tcx.dcx().emit_err(errors::InherentDyn { span: item_span });
Err(self.tcx.dcx().emit_err(errors::InherentDyn { span: item_span }))
}
ty::Bool
| ty::Char
@ -151,7 +167,7 @@ fn check_item(&mut self, id: hir::ItemId) {
| ty::FnPtr(_)
| ty::Tuple(..) => self.check_primitive_impl(id, self_ty),
ty::Alias(..) | ty::Param(_) => {
self.tcx.dcx().emit_err(errors::InherentNominal { span: item_span });
Err(self.tcx.dcx().emit_err(errors::InherentNominal { span: item_span }))
}
ty::FnDef(..)
| ty::Closure(..)
@ -162,7 +178,8 @@ fn check_item(&mut self, id: hir::ItemId) {
| ty::Infer(_) => {
bug!("unexpected impl self type of impl: {:?} {:?}", id, self_ty);
}
ty::Error(_) => {}
// We could bail out here, but that will silence other useful errors.
ty::Error(_) => Ok(()),
}
}
}
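
This file shows the pattern #119869 applies in place of `track_errors`: each check returns `Result<(), ErrorGuaranteed>`, the loop keeps running so every item is still visited (and its diagnostics emitted), and only the first error is propagated at the end. A standalone sketch of that accumulation, with a stand-in `Guar` type since a real `ErrorGuaranteed` can only be obtained by emitting a diagnostic:

```rust
#[derive(Debug)]
struct Guar; // stand-in for rustc_span::ErrorGuaranteed

fn check_one(i: usize) -> Result<(), Guar> {
    if i == 2 { Err(Guar) } else { Ok(()) }
}

fn check_all(n: usize) -> Result<(), Guar> {
    let mut res = Ok(());
    for i in 0..n {
        // Keep iterating even after a failure; `Result::and` keeps the
        // first `Err` and otherwise adopts the new result.
        res = res.and(check_one(i));
    }
    res
}

fn main() {
    assert!(check_all(5).is_err());
    assert!(check_all(2).is_ok());
}
```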

View file

@ -6,16 +6,18 @@
use rustc_index::IndexVec;
use rustc_middle::traits::specialization_graph::OverlapMode;
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::Symbol;
use rustc_span::{ErrorGuaranteed, Symbol};
use rustc_trait_selection::traits::{self, SkipLeakCheck};
use smallvec::SmallVec;
use std::collections::hash_map::Entry;
pub fn crate_inherent_impls_overlap_check(tcx: TyCtxt<'_>, (): ()) {
pub fn crate_inherent_impls_overlap_check(tcx: TyCtxt<'_>, (): ()) -> Result<(), ErrorGuaranteed> {
let mut inherent_overlap_checker = InherentOverlapChecker { tcx };
let mut res = Ok(());
for id in tcx.hir().items() {
inherent_overlap_checker.check_item(id);
res = res.and(inherent_overlap_checker.check_item(id));
}
res
}
struct InherentOverlapChecker<'tcx> {
@ -58,10 +60,11 @@ fn compare_hygienically(&self, item1: ty::AssocItem, item2: ty::AssocItem) -> bool {
== item2.ident(self.tcx).normalize_to_macros_2_0()
}
fn check_for_duplicate_items_in_impl(&self, impl_: DefId) {
fn check_for_duplicate_items_in_impl(&self, impl_: DefId) -> Result<(), ErrorGuaranteed> {
let impl_items = self.tcx.associated_items(impl_);
let mut seen_items = FxHashMap::default();
let mut res = Ok(());
for impl_item in impl_items.in_definition_order() {
let span = self.tcx.def_span(impl_item.def_id);
let ident = impl_item.ident(self.tcx);
@ -70,7 +73,7 @@ fn check_for_duplicate_items_in_impl(&self, impl_: DefId) {
match seen_items.entry(norm_ident) {
Entry::Occupied(entry) => {
let former = entry.get();
struct_span_code_err!(
res = Err(struct_span_code_err!(
self.tcx.dcx(),
span,
E0592,
@ -79,13 +82,14 @@ fn check_for_duplicate_items_in_impl(&self, impl_: DefId) {
)
.with_span_label(span, format!("duplicate definitions for `{ident}`"))
.with_span_label(*former, format!("other definition for `{ident}`"))
.emit();
.emit());
}
Entry::Vacant(entry) => {
entry.insert(span);
}
}
}
res
}
fn check_for_common_items_in_impls(
@ -93,10 +97,11 @@ fn check_for_common_items_in_impls(
impl1: DefId,
impl2: DefId,
overlap: traits::OverlapResult<'_>,
) {
) -> Result<(), ErrorGuaranteed> {
let impl_items1 = self.tcx.associated_items(impl1);
let impl_items2 = self.tcx.associated_items(impl2);
let mut res = Ok(());
for &item1 in impl_items1.in_definition_order() {
let collision = impl_items2
.filter_by_name_unhygienic(item1.name)
@ -128,9 +133,10 @@ fn check_for_common_items_in_impls(
traits::add_placeholder_note(&mut err);
}
err.emit();
res = Err(err.emit());
}
}
res
}
fn check_for_overlapping_inherent_impls(
@ -138,7 +144,7 @@ fn check_for_overlapping_inherent_impls(
overlap_mode: OverlapMode,
impl1_def_id: DefId,
impl2_def_id: DefId,
) {
) -> Result<(), ErrorGuaranteed> {
let maybe_overlap = traits::overlapping_impls(
self.tcx,
impl1_def_id,
@ -150,17 +156,19 @@ fn check_for_overlapping_inherent_impls(
);
if let Some(overlap) = maybe_overlap {
self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id, overlap);
self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id, overlap)
} else {
Ok(())
}
}
fn check_item(&mut self, id: hir::ItemId) {
fn check_item(&mut self, id: hir::ItemId) -> Result<(), ErrorGuaranteed> {
let def_kind = self.tcx.def_kind(id.owner_id);
if !matches!(def_kind, DefKind::Enum | DefKind::Struct | DefKind::Trait | DefKind::Union) {
return;
return Ok(());
}
let impls = self.tcx.inherent_impls(id.owner_id);
let impls = self.tcx.inherent_impls(id.owner_id)?;
let overlap_mode = OverlapMode::get(self.tcx, id.owner_id.to_def_id());
@ -173,17 +181,18 @@ fn check_item(&mut self, id: hir::ItemId) {
// otherwise switch to an allocating algorithm with
// faster asymptotic runtime.
const ALLOCATING_ALGO_THRESHOLD: usize = 500;
let mut res = Ok(());
if impls.len() < ALLOCATING_ALGO_THRESHOLD {
for (i, &(&impl1_def_id, impl_items1)) in impls_items.iter().enumerate() {
self.check_for_duplicate_items_in_impl(impl1_def_id);
res = res.and(self.check_for_duplicate_items_in_impl(impl1_def_id));
for &(&impl2_def_id, impl_items2) in &impls_items[(i + 1)..] {
if self.impls_have_common_items(impl_items1, impl_items2) {
self.check_for_overlapping_inherent_impls(
res = res.and(self.check_for_overlapping_inherent_impls(
overlap_mode,
impl1_def_id,
impl2_def_id,
);
));
}
}
}
@ -315,20 +324,21 @@ struct ConnectedRegion {
impl_blocks.sort_unstable();
for (i, &impl1_items_idx) in impl_blocks.iter().enumerate() {
let &(&impl1_def_id, impl_items1) = &impls_items[impl1_items_idx];
self.check_for_duplicate_items_in_impl(impl1_def_id);
res = res.and(self.check_for_duplicate_items_in_impl(impl1_def_id));
for &impl2_items_idx in impl_blocks[(i + 1)..].iter() {
let &(&impl2_def_id, impl_items2) = &impls_items[impl2_items_idx];
if self.impls_have_common_items(impl_items1, impl_items2) {
self.check_for_overlapping_inherent_impls(
res = res.and(self.check_for_overlapping_inherent_impls(
overlap_mode,
impl1_def_id,
impl2_def_id,
);
));
}
}
}
}
}
res
}
}

View file

@ -5,20 +5,22 @@
use rustc_hir::{self as hir, def, Expr, ImplItem, Item, Node, TraitItem};
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
use rustc_span::{sym, DUMMY_SP};
use rustc_span::{sym, ErrorGuaranteed, DUMMY_SP};
use crate::errors::{TaitForwardCompat, TypeOf, UnconstrainedOpaqueType};
pub fn test_opaque_hidden_types(tcx: TyCtxt<'_>) {
pub fn test_opaque_hidden_types(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
let mut res = Ok(());
if tcx.has_attr(CRATE_DEF_ID, sym::rustc_hidden_type_of_opaques) {
for id in tcx.hir().items() {
if matches!(tcx.def_kind(id.owner_id), DefKind::OpaqueTy) {
let type_of = tcx.type_of(id.owner_id).instantiate_identity();
tcx.dcx().emit_err(TypeOf { span: tcx.def_span(id.owner_id), type_of });
res = Err(tcx.dcx().emit_err(TypeOf { span: tcx.def_span(id.owner_id), type_of }));
}
}
}
res
}
/// Checks "defining uses" of opaque `impl Trait` types to ensure that they meet the restrictions

View file

@ -17,7 +17,7 @@
use rustc_hir::def_id::{LocalDefId, LocalModDefId};
use rustc_middle::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt};
use rustc_span::{Span, Symbol};
use rustc_span::{ErrorGuaranteed, Span, Symbol};
mod min_specialization;
@ -51,24 +51,29 @@
/// impl<'a> Trait<Foo> for Bar { type X = &'a i32; }
/// // ^ 'a is unused and appears in assoc type, error
/// ```
fn check_mod_impl_wf(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
fn check_mod_impl_wf(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) -> Result<(), ErrorGuaranteed> {
let min_specialization = tcx.features().min_specialization;
let module = tcx.hir_module_items(module_def_id);
let mut res = Ok(());
for id in module.items() {
if matches!(tcx.def_kind(id.owner_id), DefKind::Impl { .. }) {
enforce_impl_params_are_constrained(tcx, id.owner_id.def_id);
res = res.and(enforce_impl_params_are_constrained(tcx, id.owner_id.def_id));
if min_specialization {
check_min_specialization(tcx, id.owner_id.def_id);
res = res.and(check_min_specialization(tcx, id.owner_id.def_id));
}
}
}
res
}
pub fn provide(providers: &mut Providers) {
*providers = Providers { check_mod_impl_wf, ..*providers };
}
fn enforce_impl_params_are_constrained(tcx: TyCtxt<'_>, impl_def_id: LocalDefId) {
fn enforce_impl_params_are_constrained(
tcx: TyCtxt<'_>,
impl_def_id: LocalDefId,
) -> Result<(), ErrorGuaranteed> {
// Every lifetime used in an associated type must be constrained.
let impl_self_ty = tcx.type_of(impl_def_id).instantiate_identity();
if impl_self_ty.references_error() {
@ -80,7 +85,10 @@ fn enforce_impl_params_are_constrained(tcx: TyCtxt<'_>, impl_def_id: LocalDefId)
"potentially unconstrained type parameters weren't evaluated: {impl_self_ty:?}",
),
);
return;
// This is super fishy, but our current `rustc_hir_analysis::check_crate` pipeline depends on
// `type_of` having been called much earlier, and thus this value being read from cache.
// Compilation must continue in order for other important diagnostics to keep showing up.
return Ok(());
}
let impl_generics = tcx.generics_of(impl_def_id);
let impl_predicates = tcx.predicates_of(impl_def_id);
@ -113,13 +121,19 @@ fn enforce_impl_params_are_constrained(tcx: TyCtxt<'_>, impl_def_id: LocalDefId)
})
.collect();
let mut res = Ok(());
for param in &impl_generics.params {
match param.kind {
// Disallow ANY unconstrained type parameters.
ty::GenericParamDefKind::Type { .. } => {
let param_ty = ty::ParamTy::for_def(param);
if !input_parameters.contains(&cgp::Parameter::from(param_ty)) {
report_unused_parameter(tcx, tcx.def_span(param.def_id), "type", param_ty.name);
res = Err(report_unused_parameter(
tcx,
tcx.def_span(param.def_id),
"type",
param_ty.name,
));
}
}
ty::GenericParamDefKind::Lifetime => {
@ -127,27 +141,28 @@ fn enforce_impl_params_are_constrained(tcx: TyCtxt<'_>, impl_def_id: LocalDefId)
if lifetimes_in_associated_types.contains(&param_lt) && // (*)
!input_parameters.contains(&param_lt)
{
report_unused_parameter(
res = Err(report_unused_parameter(
tcx,
tcx.def_span(param.def_id),
"lifetime",
param.name,
);
));
}
}
ty::GenericParamDefKind::Const { .. } => {
let param_ct = ty::ParamConst::for_def(param);
if !input_parameters.contains(&cgp::Parameter::from(param_ct)) {
report_unused_parameter(
res = Err(report_unused_parameter(
tcx,
tcx.def_span(param.def_id),
"const",
param_ct.name,
);
));
}
}
}
}
res
// (*) This is a horrible concession to reality. I think it'd be
// better to just ban unconstrained lifetimes outright, but in
@ -169,7 +184,12 @@ fn enforce_impl_params_are_constrained(tcx: TyCtxt<'_>, impl_def_id: LocalDefId)
// used elsewhere are not projected back out.
}
fn report_unused_parameter(tcx: TyCtxt<'_>, span: Span, kind: &str, name: Symbol) {
fn report_unused_parameter(
tcx: TyCtxt<'_>,
span: Span,
kind: &str,
name: Symbol,
) -> ErrorGuaranteed {
let mut err = struct_span_code_err!(
tcx.dcx(),
span,
@ -188,5 +208,5 @@ impl trait, self type, or predicates",
"proving the result of expressions other than the parameter are unique is not supported",
);
}
err.emit();
err.emit()
}

View file

@ -82,10 +82,14 @@
use rustc_trait_selection::traits::outlives_bounds::InferCtxtExt as _;
use rustc_trait_selection::traits::{self, translate_args_with_cause, wf, ObligationCtxt};
pub(super) fn check_min_specialization(tcx: TyCtxt<'_>, impl_def_id: LocalDefId) {
pub(super) fn check_min_specialization(
tcx: TyCtxt<'_>,
impl_def_id: LocalDefId,
) -> Result<(), ErrorGuaranteed> {
if let Some(node) = parent_specialization_node(tcx, impl_def_id) {
check_always_applicable(tcx, impl_def_id, node);
check_always_applicable(tcx, impl_def_id, node)?;
}
Ok(())
}
fn parent_specialization_node(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId) -> Option<Node> {
@ -109,42 +113,58 @@ fn parent_specialization_node(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId) -> Option<Node> {
/// Check that `impl1` is a sound specialization
#[instrument(level = "debug", skip(tcx))]
fn check_always_applicable(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node) {
fn check_always_applicable(
tcx: TyCtxt<'_>,
impl1_def_id: LocalDefId,
impl2_node: Node,
) -> Result<(), ErrorGuaranteed> {
let span = tcx.def_span(impl1_def_id);
check_has_items(tcx, impl1_def_id, impl2_node, span);
let mut res = check_has_items(tcx, impl1_def_id, impl2_node, span);
if let Ok((impl1_args, impl2_args)) = get_impl_args(tcx, impl1_def_id, impl2_node) {
let impl2_def_id = impl2_node.def_id();
debug!(?impl2_def_id, ?impl2_args);
let (impl1_args, impl2_args) = get_impl_args(tcx, impl1_def_id, impl2_node)?;
let impl2_def_id = impl2_node.def_id();
debug!(?impl2_def_id, ?impl2_args);
let parent_args = if impl2_node.is_from_trait() {
impl2_args.to_vec()
} else {
unconstrained_parent_impl_args(tcx, impl2_def_id, impl2_args)
};
let parent_args = if impl2_node.is_from_trait() {
impl2_args.to_vec()
} else {
unconstrained_parent_impl_args(tcx, impl2_def_id, impl2_args)
};
check_constness(tcx, impl1_def_id, impl2_node, span);
check_static_lifetimes(tcx, &parent_args, span);
check_duplicate_params(tcx, impl1_args, &parent_args, span);
check_predicates(tcx, impl1_def_id, impl1_args, impl2_node, impl2_args, span);
}
res = res.and(check_constness(tcx, impl1_def_id, impl2_node, span));
res = res.and(check_static_lifetimes(tcx, &parent_args, span));
res = res.and(check_duplicate_params(tcx, impl1_args, &parent_args, span));
res = res.and(check_predicates(tcx, impl1_def_id, impl1_args, impl2_node, impl2_args, span));
res
}
fn check_has_items(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node, span: Span) {
fn check_has_items(
tcx: TyCtxt<'_>,
impl1_def_id: LocalDefId,
impl2_node: Node,
span: Span,
) -> Result<(), ErrorGuaranteed> {
if let Node::Impl(impl2_id) = impl2_node
&& tcx.associated_item_def_ids(impl1_def_id).is_empty()
{
let base_impl_span = tcx.def_span(impl2_id);
tcx.dcx().emit_err(errors::EmptySpecialization { span, base_impl_span });
return Err(tcx.dcx().emit_err(errors::EmptySpecialization { span, base_impl_span }));
}
Ok(())
}
/// Check that the specializing impl `impl1` is at least as const as the base
/// impl `impl2`
fn check_constness(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node, span: Span) {
fn check_constness(
tcx: TyCtxt<'_>,
impl1_def_id: LocalDefId,
impl2_node: Node,
span: Span,
) -> Result<(), ErrorGuaranteed> {
if impl2_node.is_from_trait() {
// This isn't a specialization
return;
return Ok(());
}
let impl1_constness = tcx.constness(impl1_def_id.to_def_id());
@ -152,9 +172,10 @@ fn check_constness(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node, span: Span) {
if let hir::Constness::Const = impl2_constness {
if let hir::Constness::NotConst = impl1_constness {
tcx.dcx().emit_err(errors::ConstSpecialize { span });
return Err(tcx.dcx().emit_err(errors::ConstSpecialize { span }));
}
}
Ok(())
}
/// Given a specializing impl `impl1`, and the base impl `impl2`, returns two
@ -290,15 +311,17 @@ fn check_duplicate_params<'tcx>(
impl1_args: GenericArgsRef<'tcx>,
parent_args: &Vec<GenericArg<'tcx>>,
span: Span,
) {
) -> Result<(), ErrorGuaranteed> {
let mut base_params = cgp::parameters_for(parent_args, true);
base_params.sort_by_key(|param| param.0);
if let (_, [duplicate, ..]) = base_params.partition_dedup() {
let param = impl1_args[duplicate.0 as usize];
tcx.dcx()
return Err(tcx
.dcx()
.struct_span_err(span, format!("specializing impl repeats parameter `{param}`"))
.emit();
.emit());
}
Ok(())
}
/// Check that `'static` lifetimes are not introduced by the specializing impl.
@ -313,10 +336,11 @@ fn check_static_lifetimes<'tcx>(
tcx: TyCtxt<'tcx>,
parent_args: &Vec<GenericArg<'tcx>>,
span: Span,
) {
) -> Result<(), ErrorGuaranteed> {
if tcx.any_free_region_meets(parent_args, |r| r.is_static()) {
tcx.dcx().emit_err(errors::StaticSpecialize { span });
return Err(tcx.dcx().emit_err(errors::StaticSpecialize { span }));
}
Ok(())
}
/// Check whether predicates on the specializing impl (`impl1`) are allowed.
@ -337,7 +361,7 @@ fn check_predicates<'tcx>(
impl2_node: Node,
impl2_args: GenericArgsRef<'tcx>,
span: Span,
) {
) -> Result<(), ErrorGuaranteed> {
let impl1_predicates: Vec<_> = traits::elaborate(
tcx,
tcx.predicates_of(impl1_def_id).instantiate(tcx, impl1_args).into_iter(),
@ -399,14 +423,16 @@ fn check_predicates<'tcx>(
}
impl2_predicates.extend(traits::elaborate(tcx, always_applicable_traits));
let mut res = Ok(());
for (clause, span) in impl1_predicates {
if !impl2_predicates
.iter()
.any(|pred2| trait_predicates_eq(tcx, clause.as_predicate(), *pred2, span))
{
check_specialization_on(tcx, clause, span)
res = res.and(check_specialization_on(tcx, clause, span))
}
}
res
}
/// Checks if some predicate on the specializing impl (`predicate1`) is the same
@ -443,19 +469,26 @@ fn trait_predicates_eq<'tcx>(
}
#[instrument(level = "debug", skip(tcx))]
fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, clause: ty::Clause<'tcx>, span: Span) {
fn check_specialization_on<'tcx>(
tcx: TyCtxt<'tcx>,
clause: ty::Clause<'tcx>,
span: Span,
) -> Result<(), ErrorGuaranteed> {
match clause.kind().skip_binder() {
// Global predicates are either always true or always false, so we
// are fine to specialize on.
_ if clause.is_global() => (),
_ if clause.is_global() => Ok(()),
// We allow specializing on explicitly marked traits with no associated
// items.
ty::ClauseKind::Trait(ty::TraitPredicate { trait_ref, polarity: _ }) => {
if !matches!(
if matches!(
trait_specialization_kind(tcx, clause),
Some(TraitSpecializationKind::Marker)
) {
tcx.dcx()
Ok(())
} else {
Err(tcx
.dcx()
.struct_span_err(
span,
format!(
@ -463,17 +496,16 @@ fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, clause: ty::Clause<'tcx>, span: Span) {
tcx.def_path_str(trait_ref.def_id),
),
)
.emit();
.emit())
}
}
ty::ClauseKind::Projection(ty::ProjectionPredicate { projection_ty, term }) => {
tcx.dcx()
.struct_span_err(
span,
format!("cannot specialize on associated type `{projection_ty} == {term}`",),
)
.emit();
}
ty::ClauseKind::Projection(ty::ProjectionPredicate { projection_ty, term }) => Err(tcx
.dcx()
.struct_span_err(
span,
format!("cannot specialize on associated type `{projection_ty} == {term}`",),
)
.emit()),
ty::ClauseKind::ConstArgHasType(..) => {
// FIXME(min_specialization), FIXME(const_generics):
// It probably isn't right to allow _every_ `ConstArgHasType` but I am somewhat unsure
@ -483,12 +515,12 @@ fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, clause: ty::Clause<'tcx>, sp
// While we do not support constructs like `<T, const N: T>` there is probably no risk of
// soundness bugs, but when we support generic const parameter types this will need to be
// revisited.
Ok(())
}
_ => {
tcx.dcx()
.struct_span_err(span, format!("cannot specialize on predicate `{clause}`"))
.emit();
}
_ => Err(tcx
.dcx()
.struct_span_err(span, format!("cannot specialize on predicate `{clause}`"))
.emit()),
}
}

View file

@ -166,33 +166,29 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
tcx.hir().for_each_module(|module| tcx.ensure().collect_mod_item_types(module))
});
// FIXME(matthewjasper) We shouldn't need to use `track_errors` anywhere in this function
// or the compiler in general.
if tcx.features().rustc_attrs {
tcx.sess.track_errors(|| {
tcx.sess.time("outlives_testing", || outlives::test::test_inferred_outlives(tcx));
})?;
tcx.sess.time("outlives_testing", || outlives::test::test_inferred_outlives(tcx))?;
}
tcx.sess.track_errors(|| {
tcx.sess.time("coherence_checking", || {
// Check impls constrain their parameters
tcx.hir().for_each_module(|module| tcx.ensure().check_mod_impl_wf(module));
tcx.sess.time("coherence_checking", || {
// Check impls constrain their parameters
let res =
tcx.hir().try_par_for_each_module(|module| tcx.ensure().check_mod_impl_wf(module));
// FIXME(matthewjasper) We shouldn't need to use `track_errors` anywhere in this function
// or the compiler in general.
res.and(tcx.sess.track_errors(|| {
for &trait_def_id in tcx.all_local_trait_impls(()).keys() {
tcx.ensure().coherent_trait(trait_def_id);
}
// these queries are executed for side-effects (error reporting):
tcx.ensure().crate_inherent_impls(());
tcx.ensure().crate_inherent_impls_overlap_check(());
});
}))
// these queries are executed for side-effects (error reporting):
.and(tcx.ensure().crate_inherent_impls(()))
.and(tcx.ensure().crate_inherent_impls_overlap_check(()))
})?;
if tcx.features().rustc_attrs {
tcx.sess.track_errors(|| {
tcx.sess.time("variance_testing", || variance::test::test_variance(tcx));
})?;
tcx.sess.time("variance_testing", || variance::test::test_variance(tcx))?;
}
tcx.sess.time("wf_checking", || {
@ -200,7 +196,7 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
})?;
if tcx.features().rustc_attrs {
tcx.sess.track_errors(|| collect::test_opaque_hidden_types(tcx))?;
collect::test_opaque_hidden_types(tcx)?;
}
// Freeze definitions as we don't add new ones at this point. This improves performance by

View file

@ -1,7 +1,8 @@
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::symbol::sym;
use rustc_span::{symbol::sym, ErrorGuaranteed};
pub fn test_inferred_outlives(tcx: TyCtxt<'_>) {
pub fn test_inferred_outlives(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
let mut res = Ok(());
for id in tcx.hir().items() {
// For unit testing: check for a special "rustc_outlives"
// attribute and report an error with various results if found.
@ -22,7 +23,8 @@ pub fn test_inferred_outlives(tcx: TyCtxt<'_>) {
for p in pred {
err.note(p);
}
err.emit();
res = Err(err.emit());
}
}
res
}

View file

@ -2,19 +2,21 @@
use rustc_hir::def_id::CRATE_DEF_ID;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::sym;
use rustc_span::ErrorGuaranteed;
use crate::errors;
pub fn test_variance(tcx: TyCtxt<'_>) {
pub fn test_variance(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
let mut res = Ok(());
if tcx.has_attr(CRATE_DEF_ID, sym::rustc_variance_of_opaques) {
for id in tcx.hir().items() {
if matches!(tcx.def_kind(id.owner_id), DefKind::OpaqueTy) {
let variances_of = tcx.variances_of(id.owner_id);
tcx.dcx().emit_err(errors::VariancesOf {
res = Err(tcx.dcx().emit_err(errors::VariancesOf {
span: tcx.def_span(id.owner_id),
variances_of: format!("{variances_of:?}"),
});
}));
}
}
}
@ -25,10 +27,11 @@ pub fn test_variance(tcx: TyCtxt<'_>) {
if tcx.has_attr(id.owner_id, sym::rustc_variance) {
let variances_of = tcx.variances_of(id.owner_id);
tcx.dcx().emit_err(errors::VariancesOf {
res = Err(tcx.dcx().emit_err(errors::VariancesOf {
span: tcx.def_span(id.owner_id),
variances_of: format!("{variances_of:?}"),
});
}));
}
}
res
}

View file

@ -71,7 +71,7 @@ pub fn check_expr_closure(
let parent_args =
GenericArgs::identity_for_item(tcx, tcx.typeck_root_def_id(expr_def_id.to_def_id()));
let tupled_upvars_ty = self.next_root_ty_var(TypeVariableOrigin {
let tupled_upvars_ty = self.next_ty_var(TypeVariableOrigin {
kind: TypeVariableOriginKind::ClosureSynthetic,
span: expr_span,
});
@ -101,7 +101,7 @@ pub fn check_expr_closure(
// Create a type variable (for now) to represent the closure kind.
// It will be unified during the upvar inference phase (`upvar.rs`)
None => self.next_root_ty_var(TypeVariableOrigin {
None => self.next_ty_var(TypeVariableOrigin {
// FIXME(eddyb) distinguish closure kind inference variables from the rest.
kind: TypeVariableOriginKind::ClosureSynthetic,
span: expr_span,

View file

@ -2102,7 +2102,8 @@ fn report_private_fields(
let mut items = self
.tcx
.inherent_impls(def_id)
.iter()
.into_iter()
.flatten()
.flat_map(|i| self.tcx.associated_items(i).in_definition_order())
// Only assoc fn with no receivers.
.filter(|item| {
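
Several call sites in this rollup adapt to the new `Result`-returning `inherent_impls` query with `.into_iter().flatten()`: in the error case the iterator is simply empty, which is fine because the error was already reported when the query ran. A standalone illustration of the idiom:

```rust
fn main() {
    let data = [1u32, 2, 3];
    let ok: Result<&[u32], ()> = Ok(data.as_slice());
    let err: Result<&[u32], ()> = Err(());

    // A `Result` iterates over zero or one items; flattening then walks the
    // slice, so `Err` behaves exactly like an empty list of impls.
    let from_ok: Vec<u32> = ok.into_iter().flatten().copied().collect();
    let from_err: Vec<u32> = err.into_iter().flatten().copied().collect();

    assert_eq!(from_ok, vec![1, 2, 3]);
    assert!(from_err.is_empty());
}
```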

View file

@ -190,10 +190,6 @@ pub fn err_ctxt(&'a self) -> TypeErrCtxt<'a, 'tcx> {
pub fn errors_reported_since_creation(&self) -> bool {
self.dcx().err_count() > self.err_count_on_creation
}
pub fn next_root_ty_var(&self, origin: TypeVariableOrigin) -> Ty<'tcx> {
Ty::new_var(self.tcx, self.next_ty_var_id_in_universe(origin, ty::UniverseIndex::ROOT))
}
}
impl<'a, 'tcx> Deref for FnCtxt<'a, 'tcx> {

View file

@ -711,14 +711,14 @@ fn assemble_inherent_candidates_for_incoherent_ty(&mut self, self_ty: Ty<'tcx>)
let Some(simp) = simplify_type(self.tcx, self_ty, TreatParams::AsCandidateKey) else {
bug!("unexpected incoherent type: {:?}", self_ty)
};
for &impl_def_id in self.tcx.incoherent_impls(simp) {
for &impl_def_id in self.tcx.incoherent_impls(simp).into_iter().flatten() {
self.assemble_inherent_impl_probe(impl_def_id);
}
}
fn assemble_inherent_impl_candidates_for_type(&mut self, def_id: DefId) {
let impl_def_ids = self.tcx.at(self.span).inherent_impls(def_id);
for &impl_def_id in impl_def_ids.iter() {
let impl_def_ids = self.tcx.at(self.span).inherent_impls(def_id).into_iter().flatten();
for &impl_def_id in impl_def_ids {
self.assemble_inherent_impl_probe(impl_def_id);
}
}

View file

@ -359,7 +359,8 @@ pub fn report_no_match_method_error(
if let ty::Adt(adt_def, _) = ty.kind() {
self.tcx
.inherent_impls(adt_def.did())
.iter()
.into_iter()
.flatten()
.any(|def_id| self.associated_value(*def_id, item_name).is_some())
} else {
false
@ -1048,7 +1049,8 @@ trait bound{s}",
let mut inherent_impls_candidate = self
.tcx
.inherent_impls(adt.did())
.iter()
.into_iter()
.flatten()
.copied()
.filter(|def_id| {
if let Some(assoc) = self.associated_value(*def_id, item_name) {
@ -1103,7 +1105,9 @@ trait bound{s}",
"the {item_kind} was found for\n{type_candidates}{additional_types}"
));
} else {
'outer: for inherent_impl_did in self.tcx.inherent_impls(adt.did()) {
'outer: for inherent_impl_did in
self.tcx.inherent_impls(adt.did()).into_iter().flatten()
{
for inherent_method in
self.tcx.associated_items(inherent_impl_did).in_definition_order()
{
@ -1457,9 +1461,9 @@ fn find_builder_fn(&self, err: &mut Diagnostic, rcvr_ty: Ty<'tcx>) {
let ty::Adt(adt_def, _) = rcvr_ty.kind() else {
return;
};
let mut items = self
.tcx
.inherent_impls(adt_def.did())
// FIXME(oli-obk): try out bubbling this error up one level and cancelling the other error in that case.
let Ok(impls) = self.tcx.inherent_impls(adt_def.did()) else { return };
let mut items = impls
.iter()
.flat_map(|i| self.tcx.associated_items(i).in_definition_order())
// Only assoc fn with no receivers.
@ -1823,7 +1827,8 @@ fn suggest_constraining_numerical_ty(
simplify_type(tcx, ty, TreatParams::AsCandidateKey)
.and_then(|simp| {
tcx.incoherent_impls(simp)
.iter()
.into_iter()
.flatten()
.find_map(|&id| self.associated_value(id, item_name))
})
.is_some()

View file

@ -4,10 +4,10 @@
use rustc_errors::{emitter::HumanReadableErrorType, registry, ColorConfig};
use rustc_session::config::{
build_configuration, build_session_options, rustc_optgroups, BranchProtection, CFGuard, Cfg,
DebugInfo, DumpMonoStatsFormat, ErrorOutputType, ExternEntry, ExternLocation, Externs,
FunctionReturn, InliningThreshold, Input, InstrumentCoverage, InstrumentXRay,
LinkSelfContained, LinkerPluginLto, LocationDetail, LtoCli, NextSolverConfig, OomStrategy,
Options, OutFileName, OutputType, OutputTypes, PAuthKey, PacRet, Passes, Polonius,
CollapseMacroDebuginfo, DebugInfo, DumpMonoStatsFormat, ErrorOutputType, ExternEntry,
ExternLocation, Externs, FunctionReturn, InliningThreshold, Input, InstrumentCoverage,
InstrumentXRay, LinkSelfContained, LinkerPluginLto, LocationDetail, LtoCli, NextSolverConfig,
OomStrategy, Options, OutFileName, OutputType, OutputTypes, PAuthKey, PacRet, Passes, Polonius,
ProcMacroExecutionStrategy, Strip, SwitchWithOptPath, SymbolManglingVersion, WasiExecModel,
};
use rustc_session::lint::Level;
@ -742,6 +742,7 @@ macro_rules! tracked {
})
);
tracked!(codegen_backend, Some("abc".to_string()));
tracked!(collapse_macro_debuginfo, CollapseMacroDebuginfo::Yes);
tracked!(crate_attr, vec!["abc".to_string()]);
tracked!(cross_crate_inline_threshold, InliningThreshold::Always);
tracked!(debug_info_for_profiling, true);

View file

@ -1027,6 +1027,7 @@ fn load_proc_macro(self, id: DefIndex, tcx: TyCtxt<'tcx>) -> SyntaxExtension {
self.root.edition,
Symbol::intern(name),
&attrs,
false,
)
}

View file

@ -283,7 +283,7 @@ fn into_args(self) -> (DefId, SimplifiedType) {
tcx.arena.alloc_from_iter(cdata.get_associated_item_or_field_def_ids(def_id.index))
}
associated_item => { cdata.get_associated_item(def_id.index, tcx.sess) }
inherent_impls => { cdata.get_inherent_implementations_for_type(tcx, def_id.index) }
inherent_impls => { Ok(cdata.get_inherent_implementations_for_type(tcx, def_id.index)) }
item_attrs => { tcx.arena.alloc_from_iter(cdata.get_item_attrs(def_id.index, tcx.sess)) }
is_mir_available => { cdata.is_item_mir_available(def_id.index) }
is_ctfe_mir_available => { cdata.is_ctfe_mir_available(def_id.index) }
@ -328,7 +328,7 @@ fn into_args(self) -> (DefId, SimplifiedType) {
traits => { tcx.arena.alloc_from_iter(cdata.get_traits()) }
trait_impls_in_crate => { tcx.arena.alloc_from_iter(cdata.get_trait_impls()) }
implementations_of_trait => { cdata.get_implementations_of_trait(tcx, other) }
crate_incoherent_impls => { cdata.get_incoherent_impls(tcx, other) }
crate_incoherent_impls => { Ok(cdata.get_incoherent_impls(tcx, other)) }
dep_kind => { cdata.dep_kind }
module_children => {

View file

@ -1485,7 +1485,7 @@ fn encode_def_ids(&mut self) {
}
let inherent_impls = tcx.with_stable_hashing_context(|hcx| {
tcx.crate_inherent_impls(()).inherent_impls.to_sorted(&hcx, true)
tcx.crate_inherent_impls(()).unwrap().inherent_impls.to_sorted(&hcx, true)
});
for (def_id, impls) in inherent_impls {
record_defaulted_array!(self.tables.inherent_impls[def_id.to_def_id()] <- impls.iter().map(|def_id| {
@ -2028,7 +2028,7 @@ fn encode_incoherent_impls(&mut self) -> LazyArray<IncoherentImpls> {
empty_proc_macro!(self);
let tcx = self.tcx;
let all_impls = tcx.with_stable_hashing_context(|hcx| {
tcx.crate_inherent_impls(()).incoherent_impls.to_sorted(&hcx, true)
tcx.crate_inherent_impls(()).unwrap().incoherent_impls.to_sorted(&hcx, true)
});
let all_impls: Vec<_> = all_impls

View file

@ -114,6 +114,7 @@ macro_rules! arena_types {
[] mod_child: rustc_middle::metadata::ModChild,
[] features: rustc_feature::Features,
[decode] specialization_graph: rustc_middle::traits::specialization_graph::Graph,
[] crate_inherent_impls: rustc_middle::ty::CrateInherentImpls,
]);
)
}

View file

@ -1098,10 +1098,10 @@ fn pre_fmt_projection(projection: &[PlaceElem<'_>], fmt: &mut Formatter<'_>) ->
| ProjectionElem::Subtype(_)
| ProjectionElem::Downcast(_, _)
| ProjectionElem::Field(_, _) => {
write!(fmt, "(").unwrap();
write!(fmt, "(")?;
}
ProjectionElem::Deref => {
write!(fmt, "(*").unwrap();
write!(fmt, "(*")?;
}
ProjectionElem::Index(_)
| ProjectionElem::ConstantIndex { .. }
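
These two lines are part of #120101: inside a function that already returns `fmt::Result`, propagating a write failure with `?` is preferable to `.unwrap()`, since writing to a formatter can legitimately fail. A minimal standalone illustration (the `Paren` type is invented for the example):

```rust
use std::fmt;

struct Paren(i32);

impl fmt::Display for Paren {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Propagate instead of panicking: the underlying writer may error.
        write!(f, "(")?;
        write!(f, "{}", self.0)?;
        write!(f, ")")
    }
}

fn main() {
    assert_eq!(Paren(7).to_string(), "(7)");
}
```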

View file

@ -82,6 +82,10 @@ impl<T> EraseType for Result<&'_ T, rustc_errors::ErrorGuaranteed> {
type Result = [u8; size_of::<Result<&'static (), rustc_errors::ErrorGuaranteed>>()];
}
impl<T> EraseType for Result<&'_ [T], rustc_errors::ErrorGuaranteed> {
type Result = [u8; size_of::<Result<&'static [()], rustc_errors::ErrorGuaranteed>>()];
}
impl<T> EraseType for Result<&'_ T, traits::CodegenObligationError> {
type Result = [u8; size_of::<Result<&'static (), traits::CodegenObligationError>>()];
}

View file

@ -851,13 +851,13 @@
/// Maps a `DefId` of a type to a list of its inherent impls.
/// Contains implementations of methods that are inherent to a type.
/// Methods in these implementations don't need to be exported.
query inherent_impls(key: DefId) -> &'tcx [DefId] {
query inherent_impls(key: DefId) -> Result<&'tcx [DefId], ErrorGuaranteed> {
desc { |tcx| "collecting inherent impls for `{}`", tcx.def_path_str(key) }
cache_on_disk_if { key.is_local() }
separate_provide_extern
}
query incoherent_impls(key: SimplifiedType) -> &'tcx [DefId] {
query incoherent_impls(key: SimplifiedType) -> Result<&'tcx [DefId], ErrorGuaranteed> {
desc { |tcx| "collecting all inherent impls for `{:?}`", key }
}
@ -953,8 +953,9 @@
desc { |tcx| "checking deathness of variables in {}", describe_as_module(key, tcx) }
}
query check_mod_impl_wf(key: LocalModDefId) -> () {
query check_mod_impl_wf(key: LocalModDefId) -> Result<(), ErrorGuaranteed> {
desc { |tcx| "checking that impls are well-formed in {}", describe_as_module(key, tcx) }
ensure_forwards_result_if_red
}
query check_mod_type_wf(key: LocalModDefId) -> Result<(), ErrorGuaranteed> {
@ -1003,15 +1004,16 @@
/// Gets a complete map from all types to their inherent impls.
/// Not meant to be used directly outside of coherence.
query crate_inherent_impls(k: ()) -> &'tcx CrateInherentImpls {
arena_cache
query crate_inherent_impls(k: ()) -> Result<&'tcx CrateInherentImpls, ErrorGuaranteed> {
desc { "finding all inherent impls defined in crate" }
ensure_forwards_result_if_red
}
/// Checks all types in the crate for overlap in their inherent impls. Reports errors.
/// Not meant to be used directly outside of coherence.
query crate_inherent_impls_overlap_check(_: ()) -> () {
query crate_inherent_impls_overlap_check(_: ()) -> Result<(), ErrorGuaranteed> {
desc { "check for overlap between inherent impls defined in this crate" }
ensure_forwards_result_if_red
}
/// Checks whether all impls in the crate pass the overlap check, returning
@ -1637,7 +1639,7 @@
///
/// Do not call this directly, but instead use the `incoherent_impls` query.
/// This query is only used to get the data necessary for that query.
query crate_incoherent_impls(key: (CrateNum, SimplifiedType)) -> &'tcx [DefId] {
query crate_incoherent_impls(key: (CrateNum, SimplifiedType)) -> Result<&'tcx [DefId], ErrorGuaranteed> {
desc { |tcx| "collecting all impls for a type in a crate" }
separate_provide_extern
}

View file

@ -174,7 +174,7 @@ pub fn query_ensure<'tcx, Cache>(
}
#[inline]
pub fn query_ensure_error_guaranteed<'tcx, Cache>(
pub fn query_ensure_error_guaranteed<'tcx, Cache, T>(
tcx: TyCtxt<'tcx>,
execute_query: fn(TyCtxt<'tcx>, Span, Cache::Key, QueryMode) -> Option<Cache::Value>,
query_cache: &Cache,
@ -182,14 +182,16 @@ pub fn query_ensure_error_guaranteed<'tcx, Cache>(
check_cache: bool,
) -> Result<(), ErrorGuaranteed>
where
Cache: QueryCache<Value = super::erase::Erase<Result<(), ErrorGuaranteed>>>,
Cache: QueryCache<Value = super::erase::Erase<Result<T, ErrorGuaranteed>>>,
Result<T, ErrorGuaranteed>: EraseType,
{
let key = key.into_query_param();
if let Some(res) = try_get_cached(tcx, query_cache, &key) {
super::erase::restore(res)
super::erase::restore(res).map(drop)
} else {
execute_query(tcx, DUMMY_SP, key, QueryMode::Ensure { check_cache })
.map(super::erase::restore)
.map(|res| res.map(drop))
// Either we actually executed the query, which means we got a full `Result`,
// or we can just assume the query succeeded, because it was green in the
// incremental cache. If it is green, that means that the previous compilation
@ -205,7 +207,7 @@ macro_rules! query_ensure {
query_ensure($($args)*)
};
([(ensure_forwards_result_if_red) $($rest:tt)*]$($args:tt)*) => {
query_ensure_error_guaranteed($($args)*)
query_ensure_error_guaranteed($($args)*).map(|_| ())
};
([$other:tt $($modifiers:tt)*]$($args:tt)*) => {
query_ensure!([$($modifiers)*]$($args)*)
@ -667,5 +669,7 @@ fn into_query_param(self) -> LocalDefId {
pub use sealed::IntoQueryParam;
use super::erase::EraseType;
#[derive(Copy, Clone, Debug, HashStable)]
pub struct CyclePlaceholder(pub ErrorGuaranteed);
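
The `map(drop)` calls above are what let `ensure()` keep returning `Result<(), ErrorGuaranteed>` while the underlying queries now return `Result<T, ErrorGuaranteed>` for arbitrary `T`: the success value is discarded and only the error is forwarded. A tiny standalone illustration of that conversion:

```rust
// `map(drop)` turns a Result<T, E> into Result<(), E>, keeping only the error.
fn erase_ok<T, E>(r: Result<T, E>) -> Result<(), E> {
    r.map(drop)
}

fn main() {
    assert_eq!(erase_ok::<_, ()>(Ok(42)), Ok(()));
    assert_eq!(erase_ok::<u32, &str>(Err("boom")), Err("boom"));
}
```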

View file

@ -2535,8 +2535,7 @@ pub fn collapsed_debuginfo(self, span: Span, upto: Span) -> Span {
if self.sess.opts.unstable_opts.debug_macros || !span.from_expansion() {
return span;
}
let collapse_debuginfo_enabled = self.features().collapse_debuginfo;
hygiene::walk_chain_collapsed(span, upto, collapse_debuginfo_enabled)
hygiene::walk_chain_collapsed(span, upto, self.features().collapse_debuginfo)
}
#[inline]

View file

@ -254,16 +254,28 @@ pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> Trait
}
/// Query provider for `incoherent_impls`.
pub(super) fn incoherent_impls_provider(tcx: TyCtxt<'_>, simp: SimplifiedType) -> &[DefId] {
pub(super) fn incoherent_impls_provider(
tcx: TyCtxt<'_>,
simp: SimplifiedType,
) -> Result<&[DefId], ErrorGuaranteed> {
let mut impls = Vec::new();
let mut res = Ok(());
for cnum in iter::once(LOCAL_CRATE).chain(tcx.crates(()).iter().copied()) {
for &impl_def_id in tcx.crate_incoherent_impls((cnum, simp)) {
let incoherent_impls = match tcx.crate_incoherent_impls((cnum, simp)) {
Ok(impls) => impls,
Err(e) => {
res = Err(e);
continue;
}
};
for &impl_def_id in incoherent_impls {
impls.push(impl_def_id)
}
}
debug!(?impls);
res?;
tcx.arena.alloc_slice(&impls)
Ok(tcx.arena.alloc_slice(&impls))
}

View file

@ -1433,7 +1433,7 @@ fn collect_alloc<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoIt
}
fn assoc_fn_of_type<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, fn_ident: Ident) -> Option<DefId> {
for impl_def_id in tcx.inherent_impls(def_id) {
for impl_def_id in tcx.inherent_impls(def_id).ok()? {
if let Some(new) = tcx.associated_items(impl_def_id).find_by_name_and_kind(
tcx,
fn_ident,

View file

@ -1745,12 +1745,10 @@ fn suggest_alternative_construction_methods(
// Doing analysis on local `DefId`s would cause infinite recursion.
return;
}
let Ok(impls) = self.r.tcx.inherent_impls(def_id) else { return };
// Look at all the associated functions without receivers in the type's
// inherent impls to look for builders that return `Self`
let mut items = self
.r
.tcx
.inherent_impls(def_id)
let mut items = impls
.iter()
.flat_map(|i| self.r.tcx.associated_items(i).in_definition_order())
// Only assoc fn with no receivers.

View file

@ -3190,12 +3190,12 @@ pub enum WasiExecModel {
/// how the hash should be calculated when adding a new command-line argument.
pub(crate) mod dep_tracking {
use super::{
BranchProtection, CFGuard, CFProtection, CrateType, DebugInfo, DebugInfoCompression,
ErrorOutputType, FunctionReturn, InliningThreshold, InstrumentCoverage, InstrumentXRay,
LinkerPluginLto, LocationDetail, LtoCli, NextSolverConfig, OomStrategy, OptLevel,
OutFileName, OutputType, OutputTypes, Polonius, RemapPathScopeComponents, ResolveDocLinks,
SourceFileHashAlgorithm, SplitDwarfKind, SwitchWithOptPath, SymbolManglingVersion,
WasiExecModel,
BranchProtection, CFGuard, CFProtection, CollapseMacroDebuginfo, CrateType, DebugInfo,
DebugInfoCompression, ErrorOutputType, FunctionReturn, InliningThreshold,
InstrumentCoverage, InstrumentXRay, LinkerPluginLto, LocationDetail, LtoCli,
NextSolverConfig, OomStrategy, OptLevel, OutFileName, OutputType, OutputTypes, Polonius,
RemapPathScopeComponents, ResolveDocLinks, SourceFileHashAlgorithm, SplitDwarfKind,
SwitchWithOptPath, SymbolManglingVersion, WasiExecModel,
};
use crate::lint;
use crate::utils::NativeLib;
@ -3274,6 +3274,7 @@ fn hash(
LtoCli,
DebugInfo,
DebugInfoCompression,
CollapseMacroDebuginfo,
UnstableFeatures,
NativeLib,
SanitizerSet,
@ -3437,6 +3438,25 @@ pub enum ProcMacroExecutionStrategy {
CrossThread,
}
/// How to collapse a macro's debuginfo
/// if-ext - collapse only if the macro comes from a different crate than the call site
/// | cmd \ attr    | no  | (unspecified) | external | yes |
/// | no            | no  | no            | no       | no  |
/// | (unspecified) | no  | no            | if-ext   | yes |
/// | external      | no  | if-ext        | if-ext   | yes |
/// | yes           | yes | yes           | yes      | yes |
#[derive(Clone, Copy, PartialEq, Hash, Debug)]
pub enum CollapseMacroDebuginfo {
/// Don't collapse debuginfo for the macro
No = 0,
/// Unspecified value
Unspecified = 1,
/// Collapse debuginfo if the macro comes from a different crate
External = 2,
/// Collapse debuginfo for the macro
Yes = 3,
}
/// Which format to use for `-Z dump-mono-stats`
#[derive(Clone, Copy, PartialEq, Hash, Debug)]
pub enum DumpMonoStatsFormat {

View file

@ -388,6 +388,7 @@ mod desc {
pub const parse_cfprotection: &str = "`none`|`no`|`n` (default), `branch`, `return`, or `full`|`yes`|`y` (equivalent to `branch` and `return`)";
pub const parse_debuginfo: &str = "either an integer (0, 1, 2), `none`, `line-directives-only`, `line-tables-only`, `limited`, or `full`";
pub const parse_debuginfo_compression: &str = "one of `none`, `zlib`, or `zstd`";
pub const parse_collapse_macro_debuginfo: &str = "one of `no`, `external`, or `yes`";
pub const parse_strip: &str = "either `none`, `debuginfo`, or `symbols`";
pub const parse_linker_flavor: &str = ::rustc_target::spec::LinkerFlavorCli::one_of();
pub const parse_optimization_fuel: &str = "crate=integer";
@ -1302,6 +1303,19 @@ pub(crate) fn parse_branch_protection(
true
}
pub(crate) fn parse_collapse_macro_debuginfo(
slot: &mut CollapseMacroDebuginfo,
v: Option<&str>,
) -> bool {
*slot = match v {
Some("no") => CollapseMacroDebuginfo::No,
Some("external") => CollapseMacroDebuginfo::External,
Some("yes") => CollapseMacroDebuginfo::Yes,
_ => return false,
};
true
}
pub(crate) fn parse_proc_macro_execution_strategy(
slot: &mut ProcMacroExecutionStrategy,
v: Option<&str>,
@ -1534,6 +1548,9 @@ pub(crate) fn parse_function_return(slot: &mut FunctionReturn, v: Option<&str>)
"instrument control-flow architecture protection"),
codegen_backend: Option<String> = (None, parse_opt_string, [TRACKED],
"the backend to use"),
collapse_macro_debuginfo: CollapseMacroDebuginfo = (CollapseMacroDebuginfo::Unspecified,
parse_collapse_macro_debuginfo, [TRACKED],
"set option to collapse debuginfo for macros"),
combine_cgu: bool = (false, parse_bool, [TRACKED],
"combine CGUs into a single one"),
crate_attr: Vec<String> = (Vec::new(), parse_string_push, [TRACKED],
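
For reference, the new flag is spelled along the lines of `-Z collapse_macro_debuginfo=no|external|yes` and is `[TRACKED]`, so it participates in dependency tracking. A standalone sketch mirroring the accepted values (the real `parse_collapse_macro_debuginfo` above writes into the option slot and signals rejection by returning `false`):

```rust
#[derive(Debug, PartialEq)]
enum Collapse {
    No,
    External,
    Yes,
    Unspecified, // default when the flag is not passed
}

// Only `no`, `external`, and `yes` are accepted; anything else is rejected.
fn parse(v: Option<&str>) -> Option<Collapse> {
    match v {
        Some("no") => Some(Collapse::No),
        Some("external") => Some(Collapse::External),
        Some("yes") => Some(Collapse::Yes),
        _ => None,
    }
}

fn main() {
    assert_eq!(parse(Some("external")), Some(Collapse::External));
    assert_eq!(parse(Some("nope")), None);
    assert_ne!(parse(None), Some(Collapse::Unspecified));
}
```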

View file

@ -464,12 +464,15 @@ fn walk_chain_collapsed(
&self,
mut span: Span,
to: Span,
collapse_debuginfo_enabled: bool,
collapse_debuginfo_feature_enabled: bool,
) -> Span {
let orig_span = span;
let mut ret_span = span;
debug!("walk_chain_collapsed({:?}, {:?})", span, to);
debug!(
"walk_chain_collapsed({:?}, {:?}), feature_enable={}",
span, to, collapse_debuginfo_feature_enabled,
);
debug!("walk_chain_collapsed: span ctxt = {:?}", span.ctxt());
while !span.eq_ctxt(to) && span.from_expansion() {
let outer_expn = self.outer_expn(span.ctxt());
@ -477,7 +480,7 @@ fn walk_chain_collapsed(
let expn_data = self.expn_data(outer_expn);
debug!("walk_chain_collapsed({:?}): expn_data={:?}", span, expn_data);
span = expn_data.call_site;
if !collapse_debuginfo_enabled || expn_data.collapse_debuginfo {
if !collapse_debuginfo_feature_enabled || expn_data.collapse_debuginfo {
ret_span = span;
}
}
@ -601,8 +604,14 @@ pub fn walk_chain(span: Span, to: SyntaxContext) -> Span {
HygieneData::with(|data| data.walk_chain(span, to))
}
pub fn walk_chain_collapsed(span: Span, to: Span, collapse_debuginfo_enabled: bool) -> Span {
HygieneData::with(|hdata| hdata.walk_chain_collapsed(span, to, collapse_debuginfo_enabled))
pub fn walk_chain_collapsed(
span: Span,
to: Span,
collapse_debuginfo_feature_enabled: bool,
) -> Span {
HygieneData::with(|hdata| {
hdata.walk_chain_collapsed(span, to, collapse_debuginfo_feature_enabled)
})
}
pub fn update_dollar_crate_names(mut get_name: impl FnMut(SyntaxContext) -> Symbol) {

View file

@ -749,6 +749,7 @@
extern_in_paths,
extern_prelude,
extern_types,
external,
external_doc,
f,
f16c_target_feature,
@ -1811,6 +1812,7 @@
xmm_reg,
yeet_desugar_details,
yeet_expr,
yes,
yield_expr,
ymm_reg,
zmm_reg,

View file

@ -924,14 +924,17 @@ class RustBuild(object):
# default toolchain is not nightly.
#
# But that setting has the collateral effect of rust-analyzer also
# passing RUSTC_BOOTSTRAP=1 to all x.py invocations too (the various
# overrideCommand). For compiling bootstrap, that is unwanted and can
# cause spurious rebuilding of bootstrap when rust-analyzer x.py
# invocations are interleaved with handwritten ones on the command line.
env.pop("RUSTC_BOOTSTRAP", None)
# passing RUSTC_BOOTSTRAP=1 to all x.py invocations too (the various overrideCommand).
# For compiling bootstrap that can cause spurious rebuilding of bootstrap when
# rust-analyzer x.py invocations are interleaved with handwritten ones on the
# command line.
#
# Set RUSTC_BOOTSTRAP=1 consistently.
env["RUSTC_BOOTSTRAP"] = "1"
# preserve existing RUSTFLAGS
env.setdefault("RUSTFLAGS", "")
default_rustflags = "" if env.get("RUSTFLAGS_BOOTSTRAP", "") else "-Zallow-features="
env.setdefault("RUSTFLAGS", default_rustflags)
target_features = []
if self.get_toml("crt-static", build_section) == "true":

View file

@ -1799,15 +1799,20 @@ pub fn cargo(
}
if self.config.rust_remap_debuginfo {
// FIXME: handle vendored sources
let registry_src = t!(home::cargo_home()).join("registry").join("src");
let mut env_var = OsString::new();
for entry in t!(std::fs::read_dir(registry_src)) {
if !env_var.is_empty() {
env_var.push("\t");
}
env_var.push(t!(entry).path());
if self.config.vendor {
let vendor = self.build.src.join("vendor");
env_var.push(vendor);
env_var.push("=/rust/deps");
} else {
let registry_src = t!(home::cargo_home()).join("registry").join("src");
for entry in t!(std::fs::read_dir(registry_src)) {
if !env_var.is_empty() {
env_var.push("\t");
}
env_var.push(t!(entry).path());
env_var.push("=/rust/deps");
}
}
cargo.env("RUSTC_CARGO_REGISTRY_SRC_TO_REMAP", env_var);
}

View file

@ -312,7 +312,7 @@ pub(crate) fn build_impls(
let tcx = cx.tcx;
// for each implementation of an item represented by `did`, build the clean::Item for that impl
for &did in tcx.inherent_impls(did).iter() {
for &did in tcx.inherent_impls(did).into_iter().flatten() {
build_impl(cx, did, attrs, ret);
}
@ -325,7 +325,7 @@ pub(crate) fn build_impls(
if tcx.has_attr(did, sym::rustc_has_incoherent_inherent_impls) {
let type_ =
if tcx.is_trait(did) { SimplifiedType::Trait(did) } else { SimplifiedType::Adt(did) };
for &did in tcx.incoherent_impls(type_) {
for &did in tcx.incoherent_impls(type_).into_iter().flatten() {
build_impl(cx, did, attrs, ret);
}
}

View file

@ -1862,7 +1862,7 @@ pub(crate) fn impls<'tcx>(&self, tcx: TyCtxt<'tcx>) -> impl Iterator<Item = DefI
.get(self)
.into_iter()
.flatten()
.flat_map(move |&simp| tcx.incoherent_impls(simp))
.flat_map(move |&simp| tcx.incoherent_impls(simp).into_iter().flatten())
.copied()
}
@ -1870,7 +1870,7 @@ pub(crate) fn all_impls(tcx: TyCtxt<'_>) -> impl Iterator<Item = DefId> + '_ {
Self::simplified_types()
.values()
.flatten()
.flat_map(move |&simp| tcx.incoherent_impls(simp))
.flat_map(move |&simp| tcx.incoherent_impls(simp).into_iter().flatten())
.copied()
}

View file

@ -624,7 +624,8 @@ fn resolve_associated_item(
// Checks if item_name belongs to `impl SomeItem`
let mut assoc_items: Vec<_> = tcx
.inherent_impls(did)
.iter()
.into_iter()
.flatten()
.flat_map(|&imp| {
filter_assoc_items_by_name_and_namespace(
tcx,

View file

@ -386,7 +386,8 @@ fn has_unsafe<'tcx>(cx: &LateContext<'tcx>, item: &'tcx Item<'_>) -> bool {
&& cx
.tcx
.inherent_impls(def.did())
.iter()
.into_iter()
.flatten()
.map(|imp_did| cx.tcx.hir().expect_item(imp_did.expect_local()))
.any(|imp| has_unsafe(cx, imp))
{

View file

@ -53,9 +53,10 @@ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
// List of spans to lint. (lint_span, first_span)
let mut lint_spans = Vec::new();
let Ok(impls) = cx.tcx.crate_inherent_impls(()) else { return };
let inherent_impls = cx
.tcx
.with_stable_hashing_context(|hcx| cx.tcx.crate_inherent_impls(()).inherent_impls.to_sorted(&hcx, true));
.with_stable_hashing_context(|hcx| impls.inherent_impls.to_sorted(&hcx, true));
for (_, impl_ids) in inherent_impls.into_iter().filter(|(&id, impls)| {
impls.len() > 1

View file

@ -139,7 +139,7 @@ fn deref_chain<'cx, 'tcx>(cx: &'cx LateContext<'tcx>, ty: Ty<'tcx>) -> impl Iter
fn adt_has_inherent_method(cx: &LateContext<'_>, ty: Ty<'_>, method_name: Symbol) -> bool {
if let Some(ty_did) = ty.ty_adt_def().map(ty::AdtDef::did) {
cx.tcx.inherent_impls(ty_did).iter().any(|&did| {
cx.tcx.inherent_impls(ty_did).into_iter().flatten().any(|&did| {
cx.tcx
.associated_items(did)
.filter_by_name_unhygienic(method_name)

View file

@ -441,7 +441,8 @@ fn check_for_is_empty(
let is_empty = cx
.tcx
.inherent_impls(impl_ty)
.iter()
.into_iter()
.flatten()
.flat_map(|&id| cx.tcx.associated_items(id).filter_by_name_unhygienic(is_empty))
.find(|item| item.kind == AssocKind::Fn);
@ -605,7 +606,7 @@ fn is_is_empty(cx: &LateContext<'_>, item: &ty::AssocItem) -> bool {
/// Checks the inherent impl's items for an `is_empty(self)` method.
fn has_is_empty_impl(cx: &LateContext<'_>, id: DefId) -> bool {
let is_empty = sym!(is_empty);
cx.tcx.inherent_impls(id).iter().any(|imp| {
cx.tcx.inherent_impls(id).into_iter().flatten().any(|imp| {
cx.tcx
.associated_items(*imp)
.filter_by_name_unhygienic(is_empty)

View file

@ -73,7 +73,8 @@ fn check_unwrap_or_default(
let has_suggested_method = receiver_ty.ty_adt_def().is_some_and(|adt_def| {
cx.tcx
.inherent_impls(adt_def.did())
.iter()
.into_iter()
.flatten()
.flat_map(|impl_id| cx.tcx.associated_items(impl_id).filter_by_name_unhygienic(sugg))
.any(|assoc| {
assoc.fn_has_self_parameter

View file

@ -534,10 +534,11 @@ fn find_primitive_impls<'tcx>(tcx: TyCtxt<'tcx>, name: &str) -> impl Iterator<It
"u128" => SimplifiedType::Uint(UintTy::U128),
"f32" => SimplifiedType::Float(FloatTy::F32),
"f64" => SimplifiedType::Float(FloatTy::F64),
_ => return [].iter().copied(),
#[allow(trivial_casts)]
_ => return Result::<_, rustc_errors::ErrorGuaranteed>::Ok(&[] as &[_]).into_iter().flatten().copied(),
};
tcx.incoherent_impls(ty).iter().copied()
tcx.incoherent_impls(ty).into_iter().flatten().copied()
}
fn non_local_item_children_by_name(tcx: TyCtxt<'_>, def_id: DefId, name: Symbol) -> Vec<Res> {
@ -663,7 +664,8 @@ fn find_crates(tcx: TyCtxt<'_>, name: Symbol) -> impl Iterator<Item = DefId> + '
// `impl S { ... }`
let inherent_impl_children = tcx
.inherent_impls(def_id)
.iter()
.into_iter()
.flatten()
.flat_map(|&impl_def_id| item_children_by_name(tcx, impl_def_id, segment));
let direct_children = item_children_by_name(tcx, def_id, segment);
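
The unusual fallback arm above (the one under `#[allow(trivial_casts)]`) is needed because `find_primitive_impls` returns `impl Iterator`, so the early-return "no match" arm must have exactly the same concrete iterator type as the `incoherent_impls` path; routing an empty slice through the same `Result` shape achieves that. A simplified, self-contained sketch of the same trick (the names and types here are illustrative, not clippy's):

// Minimal sketch, not clippy code: every return site of an `impl Iterator` function must
// produce the same concrete type, so the fallback goes through the same Result shape.
fn find_ids(name: &str) -> impl Iterator<Item = u32> {
    const KNOWN: &[u32] = &[1, 2, 3];
    let ids: Result<&[u32], ()> = match name {
        "known" => Ok(KNOWN),
        // Early return: same `Result<&[u32], _>` and the same adapter chain as the tail expression.
        _ => return Result::<&[u32], ()>::Ok(&[]).into_iter().flatten().copied(),
    };
    ids.into_iter().flatten().copied()
}

fn main() {
    assert_eq!(find_ids("known").collect::<Vec<_>>(), vec![1, 2, 3]);
    assert_eq!(find_ids("other").count(), 0);
}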

View file

@ -1,5 +1,5 @@
//! The man pages for mmap/munmap suggest that it is possible to partly unmap a previously-mapped
//! region of addres space, but to LLVM that would be partial deallocation, which LLVM does not
//! region of address space, but to LLVM that would be partial deallocation, which LLVM does not
//! support. So even though the man pages say this sort of use is possible, we must report UB.
//@ignore-target-windows: No libc on Windows
//@normalize-stderr-test: "size [0-9]+ and alignment" -> "size SIZE and alignment"

View file

@ -0,0 +1,31 @@
// ignore-lldb
#![feature(collapse_debuginfo)]
// Test that local macro debug info is not collapsed with #[collapse_debuginfo(external)]
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:next
// gdb-command:frame
// gdb-check:[...]#one_callsite[...]
// gdb-command:continue
fn one() {
println!("one");
}
#[collapse_debuginfo(external)]
macro_rules! outer {
() => {
one(); // #one_callsite
};
}
fn main() {
let ret = 0; // #break
outer!();
std::process::exit(ret);
}

View file

@ -0,0 +1,34 @@
// ignore-lldb
#![feature(collapse_debuginfo)]
// Test that macro attribute #[collapse_debuginfo(no)]
// overrides "collapse_macro_debuginfo=external" flag
// compile-flags:-g -Z collapse_macro_debuginfo=external
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:next
// gdb-command:frame
// gdb-check:[...]#one_callsite[...]
// gdb-command:next
// gdb-command:frame
// gdb-command:continue
fn one() {
println!("one");
}
#[collapse_debuginfo(no)]
macro_rules! outer {
() => {
one(); // #one_callsite
};
}
fn main() {
let ret = 0; // #break
outer!();
std::process::exit(ret);
}

View file

@ -0,0 +1,26 @@
// ignore-lldb
#![feature(collapse_debuginfo)]
// Test that println macro debug info is collapsed with "collapse_macro_debuginfo=external" flag
// compile-flags:-g -Z collapse_macro_debuginfo=external
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:next
// gdb-command:frame
// gdb-check:[...]#println_callsite[...]
// gdb-command:continue
macro_rules! outer {
() => {
println!("one"); // #println_callsite
};
}
fn main() {
let ret = 0; // #break
outer!();
std::process::exit(ret);
}

View file

@ -4,7 +4,7 @@
// Test that line numbers are not replaced with those of the outermost expansion site when the
// `collapse_debuginfo` feature is active and the attribute is not provided.
// compile-flags:-g
// compile-flags:-g -Z collapse_macro_debuginfo=no
// === GDB TESTS ===================================================================================

View file

@ -0,0 +1,57 @@
// ignore-lldb
#![feature(collapse_debuginfo)]
// Test that line numbers are replaced with those of the outermost expansion site when the
// `collapse_debuginfo` feature is active and the command line flag is provided.
// compile-flags:-g -Z collapse_macro_debuginfo=yes
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:next
// gdb-command:frame
// gdb-check:[...]#loc1[...]
// gdb-command:next
// gdb-command:frame
// gdb-check:[...]#loc2[...]
// gdb-command:next
// gdb-command:frame
// gdb-check:[...]#loc3[...]
// gdb-command:continue
fn one() {
println!("one");
}
fn two() {
println!("two");
}
fn three() {
println!("three");
}
fn four() {
println!("four");
}
macro_rules! outer {
($b:block) => {
one();
inner!();
$b
};
}
macro_rules! inner {
() => {
two();
};
}
fn main() {
let ret = 0; // #break
outer!({ // #loc1
three(); // #loc2
four(); // #loc3
});
std::process::exit(ret);
}

View file

@ -1,5 +1,5 @@
Each of these needs to be in a separate file,
because the `span_delayed_bug` ICE in rustdoc won't be triggerred
because the `span_delayed_bug` ICE in rustdoc won't be triggered
if even a single other error was emitted.
However, conceptually they are all testing basically the same thing.

View file

@ -13,7 +13,7 @@ LL | .intel_syntax noprefix
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:32:15
--> $DIR/inline-syntax.rs:38:15
|
LL | asm!(".intel_syntax noprefix", "nop");
| ^
@ -25,7 +25,7 @@ LL | .intel_syntax noprefix
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:35:15
--> $DIR/inline-syntax.rs:42:15
|
LL | asm!(".intel_syntax aaa noprefix", "nop");
| ^
@ -37,7 +37,7 @@ LL | .intel_syntax aaa noprefix
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:38:15
--> $DIR/inline-syntax.rs:46:15
|
LL | asm!(".att_syntax noprefix", "nop");
| ^
@ -49,7 +49,7 @@ LL | .att_syntax noprefix
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:41:15
--> $DIR/inline-syntax.rs:50:15
|
LL | asm!(".att_syntax bbb noprefix", "nop");
| ^
@ -61,7 +61,7 @@ LL | .att_syntax bbb noprefix
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:44:15
--> $DIR/inline-syntax.rs:54:15
|
LL | asm!(".intel_syntax noprefix; nop");
| ^
@ -73,7 +73,7 @@ LL | .intel_syntax noprefix; nop
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:50:13
--> $DIR/inline-syntax.rs:61:13
|
LL | .intel_syntax noprefix
| ^

View file

@ -0,0 +1,90 @@
error: unknown directive
|
note: instantiated into assembly here
--> <inline asm>:1:1
|
LL | .intel_syntax noprefix
| ^
error: unknown directive
|
note: instantiated into assembly here
--> <inline asm>:1:1
|
LL | .intel_syntax noprefix
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:38:15
|
LL | asm!(".intel_syntax noprefix", "nop");
| ^
|
note: instantiated into assembly here
--> <inline asm>:1:2
|
LL | .intel_syntax noprefix
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:42:15
|
LL | asm!(".intel_syntax aaa noprefix", "nop");
| ^
|
note: instantiated into assembly here
--> <inline asm>:1:2
|
LL | .intel_syntax aaa noprefix
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:46:15
|
LL | asm!(".att_syntax noprefix", "nop");
| ^
|
note: instantiated into assembly here
--> <inline asm>:1:2
|
LL | .att_syntax noprefix
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:50:15
|
LL | asm!(".att_syntax bbb noprefix", "nop");
| ^
|
note: instantiated into assembly here
--> <inline asm>:1:2
|
LL | .att_syntax bbb noprefix
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:54:15
|
LL | asm!(".intel_syntax noprefix; nop");
| ^
|
note: instantiated into assembly here
--> <inline asm>:1:2
|
LL | .intel_syntax noprefix; nop
| ^
error: unknown directive
--> $DIR/inline-syntax.rs:61:13
|
LL | .intel_syntax noprefix
| ^
|
note: instantiated into assembly here
--> <inline asm>:2:13
|
LL | .intel_syntax noprefix
| ^
error: aborting due to 8 previous errors

View file

@ -1,4 +1,4 @@
// revisions: x86_64 arm
// revisions: x86_64 arm arm_llvm_18
//[x86_64] compile-flags: --target x86_64-unknown-linux-gnu
//[x86_64] check-pass
//[x86_64] needs-llvm-components: x86
@ -8,6 +8,12 @@
//[arm] compile-flags: --target armv7-unknown-linux-gnueabihf
//[arm] build-fail
//[arm] needs-llvm-components: arm
//[arm] ignore-llvm-version: 18 - 99
// Newer LLVM produces extra error notes.
//[arm_llvm_18] compile-flags: --target armv7-unknown-linux-gnueabihf
//[arm_llvm_18] build-fail
//[arm_llvm_18] needs-llvm-components: arm
//[arm_llvm_18] min-llvm-version: 18
// needs-asm-support
#![feature(no_core, lang_items, rustc_attrs)]
@ -32,18 +38,23 @@ pub fn main() {
asm!(".intel_syntax noprefix", "nop");
//[x86_64]~^ WARN avoid using `.intel_syntax`
//[arm]~^^ ERROR unknown directive
//[arm_llvm_18]~^^^ ERROR unknown directive
asm!(".intel_syntax aaa noprefix", "nop");
//[x86_64]~^ WARN avoid using `.intel_syntax`
//[arm]~^^ ERROR unknown directive
//[arm_llvm_18]~^^^ ERROR unknown directive
asm!(".att_syntax noprefix", "nop");
//[x86_64]~^ WARN avoid using `.att_syntax`
//[arm]~^^ ERROR unknown directive
//[arm_llvm_18]~^^^ ERROR unknown directive
asm!(".att_syntax bbb noprefix", "nop");
//[x86_64]~^ WARN avoid using `.att_syntax`
//[arm]~^^ ERROR unknown directive
//[arm_llvm_18]~^^^ ERROR unknown directive
asm!(".intel_syntax noprefix; nop");
//[x86_64]~^ WARN avoid using `.intel_syntax`
//[arm]~^^ ERROR unknown directive
//[arm_llvm_18]~^^^ ERROR unknown directive
asm!(
r"
@ -52,6 +63,7 @@ pub fn main() {
);
//[x86_64]~^^^ WARN avoid using `.intel_syntax`
//[arm]~^^^^ ERROR unknown directive
//[arm_llvm_18]~^^^^^ ERROR unknown directive
}
}

View file

@ -1,5 +1,5 @@
warning: avoid using `.intel_syntax`, Intel syntax is the default
--> $DIR/inline-syntax.rs:58:14
--> $DIR/inline-syntax.rs:70:14
|
LL | global_asm!(".intel_syntax noprefix", "nop");
| ^^^^^^^^^^^^^^^^^^^^^^
@ -7,37 +7,37 @@ LL | global_asm!(".intel_syntax noprefix", "nop");
= note: `#[warn(bad_asm_style)]` on by default
warning: avoid using `.intel_syntax`, Intel syntax is the default
--> $DIR/inline-syntax.rs:32:15
--> $DIR/inline-syntax.rs:38:15
|
LL | asm!(".intel_syntax noprefix", "nop");
| ^^^^^^^^^^^^^^^^^^^^^^
warning: avoid using `.intel_syntax`, Intel syntax is the default
--> $DIR/inline-syntax.rs:35:15
--> $DIR/inline-syntax.rs:42:15
|
LL | asm!(".intel_syntax aaa noprefix", "nop");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
warning: avoid using `.att_syntax`, prefer using `options(att_syntax)` instead
--> $DIR/inline-syntax.rs:38:15
--> $DIR/inline-syntax.rs:46:15
|
LL | asm!(".att_syntax noprefix", "nop");
| ^^^^^^^^^^^^^^^^^^^^
warning: avoid using `.att_syntax`, prefer using `options(att_syntax)` instead
--> $DIR/inline-syntax.rs:41:15
--> $DIR/inline-syntax.rs:50:15
|
LL | asm!(".att_syntax bbb noprefix", "nop");
| ^^^^^^^^^^^^^^^^^^^^^^^^
warning: avoid using `.intel_syntax`, Intel syntax is the default
--> $DIR/inline-syntax.rs:44:15
--> $DIR/inline-syntax.rs:54:15
|
LL | asm!(".intel_syntax noprefix; nop");
| ^^^^^^^^^^^^^^^^^^^^^^
warning: avoid using `.intel_syntax`, Intel syntax is the default
--> $DIR/inline-syntax.rs:50:13
--> $DIR/inline-syntax.rs:61:13
|
LL | .intel_syntax noprefix
| ^^^^^^^^^^^^^^^^^^^^^^

View file

@ -7,6 +7,8 @@ trait Foo<T> {
impl Foo<char> for Bar {
fn foo<F2: Foo<u8>>(self) -> impl Foo<u8> {
//~^ ERROR: the trait bound `impl Foo<u8>: Foo<char>` is not satisfied [E0277]
//~| ERROR: the trait bound `Bar: Foo<u8>` is not satisfied [E0277]
//~| ERROR: impl has stricter requirements than trait
self
}
}

View file

@ -11,6 +11,28 @@ note: required by a bound in `Foo::{opaque#0}`
LL | fn foo<F2>(self) -> impl Foo<T>;
| ^^^^^^ required by this bound in `Foo::{opaque#0}`
error: aborting due to 1 previous error
error[E0276]: impl has stricter requirements than trait
--> $DIR/return-dont-satisfy-bounds.rs:8:16
|
LL | fn foo<F2>(self) -> impl Foo<T>;
| -------------------------------- definition of `foo` from trait
...
LL | fn foo<F2: Foo<u8>>(self) -> impl Foo<u8> {
| ^^^^^^^ impl has extra requirement `F2: Foo<u8>`
For more information about this error, try `rustc --explain E0277`.
error[E0277]: the trait bound `Bar: Foo<u8>` is not satisfied
--> $DIR/return-dont-satisfy-bounds.rs:8:34
|
LL | fn foo<F2: Foo<u8>>(self) -> impl Foo<u8> {
| ^^^^^^^^^^^^ the trait `Foo<u8>` is not implemented for `Bar`
...
LL | self
| ---- return type was inferred to be `Bar` here
|
= help: the trait `Foo<char>` is implemented for `Bar`
= help: for that trait implementation, expected `char`, found `u8`
error: aborting due to 3 previous errors
Some errors have detailed explanations: E0276, E0277.
For more information about an error, try `rustc --explain E0276`.

View file

@ -7,6 +7,16 @@ LL | impl<T: Default> A for T {
LL | impl<T: Default + ~const Sup> const A for T {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation
error: aborting due to 1 previous error
error[E0308]: mismatched types
--> $DIR/specializing-constness-2.rs:27:5
|
LL | <T as A>::a();
| ^^^^^^^^^^^^^ expected `host`, found `true`
|
= note: expected constant `host`
found constant `true`
For more information about this error, try `rustc --explain E0119`.
error: aborting due to 2 previous errors
Some errors have detailed explanations: E0119, E0308.
For more information about an error, try `rustc --explain E0119`.