Make rustc_next_trait_solver nightly again

Michael Goulet 2024-06-18 13:21:48 -04:00
parent c20d909701
commit 9b0f9ef42e
9 changed files with 88 additions and 60 deletions

View file

@@ -4913,6 +4913,7 @@ version = "0.0.0"
dependencies = [
"bitflags 2.5.0",
"derivative",
"indexmap",
"rustc_ast_ir",
"rustc_data_structures",
"rustc_index",

View file

@@ -7,7 +7,7 @@ edition = "2021"
# tidy-alphabetical-start
bitflags = "2.4.1"
derivative = "2.2.0"
rustc_ast_ir = { path = "../rustc_ast_ir" }
rustc_ast_ir = { path = "../rustc_ast_ir", default-features = false }
rustc_data_structures = { path = "../rustc_data_structures", optional = true }
rustc_index = { path = "../rustc_index", default-features = false }
rustc_macros = { path = "../rustc_macros", optional = true }

View file

@@ -4,8 +4,6 @@
//! but were uplifted in the process of making the new trait solver generic.
//! So if you got to this crate from the old solver, it's totally normal.
#![feature(let_chains)]
pub mod canonicalizer;
pub mod infcx;
pub mod resolve;

View file

@@ -2,7 +2,7 @@
//! traits, `Copy`/`Clone`.
use rustc_ast_ir::{Movability, Mutability};
use rustc_data_structures::fx::FxHashMap;
use rustc_type_ir::data_structures::HashMap;
use rustc_type_ir::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable};
use rustc_type_ir::inherent::*;
use rustc_type_ir::lang_items::TraitSolverLangItem;
@@ -304,9 +304,10 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_callable<I: Intern
let kind_ty = args.kind_ty();
let sig = args.coroutine_closure_sig().skip_binder();
let coroutine_ty = if let Some(closure_kind) = kind_ty.to_opt_closure_kind()
&& !args.tupled_upvars_ty().is_ty_var()
{
// FIXME: let_chains
let kind = kind_ty.to_opt_closure_kind();
let coroutine_ty = if kind.is_some() && !args.tupled_upvars_ty().is_ty_var() {
let closure_kind = kind.unwrap();
if !closure_kind.extends(goal_kind) {
return Err(NoSolution);
}
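With `#![feature(let_chains)]` removed from lib.rs, every `let`-chain in the crate is rewritten into the nested form that stable Rust accepts, as in the hunk above (the async-callable hunk below gets the same treatment). A minimal sketch of the same desugaring on plain `Option` values; the names are illustrative stand-ins, not the solver's types:

// Nightly-only `let`-chain:
//     if let Some(kind) = closure_kind && upvars_resolved { ... }
// Stable rewrite, mirroring the "FIXME: let_chains" pattern above:
fn kind_extends_goal(closure_kind: Option<u32>, upvars_resolved: bool, goal_kind: u32) -> Result<bool, ()> {
    let kind = closure_kind;
    if kind.is_some() && upvars_resolved {
        let kind = kind.unwrap();
        if kind < goal_kind {
            return Err(());
        }
        Ok(true)
    } else {
        Ok(false)
    }
}

The extra `unwrap` is safe because it sits behind the same `is_some` check; the `FIXME: let_chains` comments mark these spots for cleanup once `let`-chains are usable on the crate's minimum toolchain.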
@@ -411,10 +412,11 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_async_callable<I:
let kind_ty = args.kind_ty();
let sig = args.coroutine_closure_sig().skip_binder();
let mut nested = vec![];
let coroutine_ty = if let Some(closure_kind) = kind_ty.to_opt_closure_kind()
&& !args.tupled_upvars_ty().is_ty_var()
{
if !closure_kind.extends(goal_kind) {
// FIXME: let_chains
let kind = kind_ty.to_opt_closure_kind();
let coroutine_ty = if kind.is_some() && !args.tupled_upvars_ty().is_ty_var() {
if !kind.unwrap().extends(goal_kind) {
return Err(NoSolution);
}
@@ -683,7 +685,7 @@ pub(in crate::solve) fn predicates_for_object_candidate<Infcx, I>(
);
}
let mut replace_projection_with = FxHashMap::default();
let mut replace_projection_with = HashMap::default();
for bound in object_bounds {
if let ty::ExistentialPredicate::Projection(proj) = bound.skip_binder() {
let proj = proj.with_self_ty(tcx, trait_ref.self_ty());
@@ -713,7 +715,7 @@ pub(in crate::solve) fn predicates_for_object_candidate<Infcx, I>(
struct ReplaceProjectionWith<'a, Infcx: SolverDelegate<Interner = I>, I: Interner> {
ecx: &'a EvalCtxt<'a, Infcx>,
param_env: I::ParamEnv,
mapping: FxHashMap<I::DefId, ty::Binder<I, ty::ProjectionPredicate<I>>>,
mapping: HashMap<I::DefId, ty::Binder<I, ty::ProjectionPredicate<I>>>,
nested: Vec<Goal<I, I::Predicate>>,
}
@@ -725,24 +727,28 @@ fn interner(&self) -> I {
}
fn fold_ty(&mut self, ty: I::Ty) -> I::Ty {
if let ty::Alias(ty::Projection, alias_ty) = ty.kind()
&& let Some(replacement) = self.mapping.get(&alias_ty.def_id)
{
// We may have a case where our object type's projection bound is higher-ranked,
// but the where clauses we instantiated are not. We can solve this by instantiating
// the binder at the usage site.
let proj = self.ecx.instantiate_binder_with_infer(*replacement);
// FIXME: Technically this equate could be fallible...
self.nested.extend(
self.ecx
.eq_and_get_goals(
self.param_env,
alias_ty,
proj.projection_term.expect_ty(self.ecx.interner()),
)
.expect("expected to be able to unify goal projection with dyn's projection"),
);
proj.term.expect_ty()
if let ty::Alias(ty::Projection, alias_ty) = ty.kind() {
if let Some(replacement) = self.mapping.get(&alias_ty.def_id) {
// We may have a case where our object type's projection bound is higher-ranked,
// but the where clauses we instantiated are not. We can solve this by instantiating
// the binder at the usage site.
let proj = self.ecx.instantiate_binder_with_infer(*replacement);
// FIXME: Technically this equate could be fallible...
self.nested.extend(
self.ecx
.eq_and_get_goals(
self.param_env,
alias_ty,
proj.projection_term.expect_ty(self.ecx.interner()),
)
.expect(
"expected to be able to unify goal projection with dyn's projection",
),
);
proj.term.expect_ty()
} else {
ty.super_fold_with(self)
}
} else {
ty.super_fold_with(self)
}
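The other change in this file swaps `rustc_data_structures::fx::FxHashMap` for the `HashMap` re-exported from `rustc_type_ir::data_structures`. Call sites such as `HashMap::default()` need no edits because both backing types implement `Default` and expose the same `insert`/`get` surface; a small sketch against std only, with illustrative names:

use std::collections::HashMap;

// The same body compiles against FxHashMap; only the `use` line differs,
// which is exactly the difference the data_structures re-export hides.
fn build_replacement_map(bounds: &[(u32, &'static str)]) -> HashMap<u32, &'static str> {
    let mut replace_projection_with = HashMap::default();
    for &(def_id, pred) in bounds {
        replace_projection_with.insert(def_id, pred);
    }
    replace_projection_with
}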

View file

@@ -1,7 +1,8 @@
use std::ops::ControlFlow;
use rustc_data_structures::stack::ensure_sufficient_stack;
#[cfg(feature = "nightly")]
use rustc_macros::{HashStable_NoContext, TyDecodable, TyEncodable};
use rustc_type_ir::data_structures::ensure_sufficient_stack;
use rustc_type_ir::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable};
use rustc_type_ir::inherent::*;
use rustc_type_ir::relate::Relate;
@@ -88,7 +89,7 @@ pub struct EvalCtxt<'a, Infcx, I = <Infcx as SolverDelegate>::Interner>
#[derive(derivative::Derivative)]
#[derivative(Clone(bound = ""), Debug(bound = ""), Default(bound = ""))]
#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)]
#[derive(TyDecodable, TyEncodable, HashStable_NoContext)]
#[cfg_attr(feature = "nightly", derive(TyDecodable, TyEncodable, HashStable_NoContext))]
// FIXME: This can be made crate-private once `EvalCtxt` also lives in this crate.
pub struct NestedGoals<I: Interner> {
/// These normalizes-to goals are treated specially during the evaluation
@@ -116,7 +117,8 @@ pub fn is_empty(&self) -> bool {
}
}
#[derive(PartialEq, Eq, Debug, Hash, HashStable_NoContext, Clone, Copy)]
#[derive(PartialEq, Eq, Debug, Hash, Clone, Copy)]
#[cfg_attr(feature = "nightly", derive(HashStable_NoContext))]
pub enum GenerateProofTree {
Yes,
No,
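`TyDecodable`, `TyEncodable`, and `HashStable_NoContext` come from `rustc_macros`, which is only pulled in when the `nightly` feature is enabled, so those derives move behind `cfg_attr` while the ordinary std derives stay unconditional. A sketch of the same gating with std's `Hash` standing in for the rustc-internal derives (the type name is hypothetical):

// With the `nightly` feature off, the type simply derives one trait fewer;
// everything else about it is unchanged.
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
#[cfg_attr(feature = "nightly", derive(Hash))]
enum ProofTreeChoice {
    Yes,
    No,
}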
@@ -689,14 +691,15 @@ impl<Infcx: SolverDelegate<Interner = I>, I: Interner> TypeVisitor<I>
fn visit_ty(&mut self, t: I::Ty) -> Self::Result {
match t.kind() {
ty::Infer(ty::TyVar(vid)) => {
if let ty::TermKind::Ty(term) = self.term.kind()
&& let ty::Infer(ty::TyVar(term_vid)) = term.kind()
&& self.infcx.root_ty_var(vid) == self.infcx.root_ty_var(term_vid)
{
ControlFlow::Break(())
} else {
self.check_nameable(self.infcx.universe_of_ty(vid).unwrap())
if let ty::TermKind::Ty(term) = self.term.kind() {
if let ty::Infer(ty::TyVar(term_vid)) = term.kind() {
if self.infcx.root_ty_var(vid) == self.infcx.root_ty_var(term_vid) {
return ControlFlow::Break(());
}
}
}
self.check_nameable(self.infcx.universe_of_ty(vid).unwrap())
}
ty::Placeholder(p) => self.check_nameable(p.universe()),
_ => {
@@ -712,14 +715,18 @@ fn visit_ty(&mut self, t: I::Ty) -> Self::Result {
fn visit_const(&mut self, c: I::Const) -> Self::Result {
match c.kind() {
ty::ConstKind::Infer(ty::InferConst::Var(vid)) => {
if let ty::TermKind::Const(term) = self.term.kind()
&& let ty::ConstKind::Infer(ty::InferConst::Var(term_vid)) = term.kind()
&& self.infcx.root_const_var(vid) == self.infcx.root_const_var(term_vid)
{
ControlFlow::Break(())
} else {
self.check_nameable(self.infcx.universe_of_ct(vid).unwrap())
if let ty::TermKind::Const(term) = self.term.kind() {
if let ty::ConstKind::Infer(ty::InferConst::Var(term_vid)) = term.kind()
{
if self.infcx.root_const_var(vid)
== self.infcx.root_const_var(term_vid)
{
return ControlFlow::Break(());
}
}
}
self.check_nameable(self.infcx.universe_of_ct(vid).unwrap())
}
ty::ConstKind::Placeholder(p) => self.check_nameable(p.universe()),
_ => {

View file

@@ -1,7 +1,7 @@
use std::mem;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_index::{Idx, IndexVec};
use rustc_type_ir::data_structures::{HashMap, HashSet};
use rustc_type_ir::inherent::*;
use rustc_type_ir::Interner;
use tracing::debug;
@@ -17,6 +17,7 @@
rustc_index::newtype_index! {
#[orderable]
#[gate_rustc_only]
pub struct StackDepth {}
}
@@ -70,7 +71,7 @@ struct StackEntry<I: Interner> {
/// C :- D
/// D :- C
/// ```
cycle_participants: FxHashSet<CanonicalInput<I>>,
cycle_participants: HashSet<CanonicalInput<I>>,
/// Starts out as `None` and gets set when rerunning this
/// goal in case we encounter a cycle.
provisional_result: Option<QueryResult<I>>,
@@ -126,7 +127,7 @@ pub(super) struct SearchGraph<I: Interner> {
///
/// An element is *deeper* in the stack if its index is *lower*.
stack: IndexVec<StackDepth, StackEntry<I>>,
provisional_cache: FxHashMap<CanonicalInput<I>, ProvisionalCacheEntry<I>>,
provisional_cache: HashMap<CanonicalInput<I>, ProvisionalCacheEntry<I>>,
}
impl<I: Interner> SearchGraph<I> {
@@ -227,13 +228,17 @@ fn tag_cycle_participants(
}
fn clear_dependent_provisional_results(
provisional_cache: &mut FxHashMap<CanonicalInput<I>, ProvisionalCacheEntry<I>>,
provisional_cache: &mut HashMap<CanonicalInput<I>, ProvisionalCacheEntry<I>>,
head: StackDepth,
) {
#[allow(rustc::potential_query_instability)]
provisional_cache.retain(|_, entry| {
entry.with_coinductive_stack.take_if(|p| p.head == head);
entry.with_inductive_stack.take_if(|p| p.head == head);
if entry.with_coinductive_stack.as_ref().is_some_and(|p| p.head == head) {
entry.with_coinductive_stack.take();
}
if entry.with_inductive_stack.as_ref().is_some_and(|p| p.head == head) {
entry.with_inductive_stack.take();
}
!entry.is_empty()
});
}
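`Option::take_if` was not yet available on the stable toolchains this crate now has to build with, so the retain body above spells it out as `is_some_and` followed by `take`. A minimal check that the two formulations agree, with `u32` standing in for the stack-depth head:

// Equivalent to `entry.take_if(|p| *p == head)` on toolchains that have it.
fn clear_if_head_matches(entry: &mut Option<u32>, head: u32) {
    if entry.as_ref().is_some_and(|p| *p == head) {
        entry.take();
    }
}

fn main() {
    let mut hit = Some(3);
    let mut miss = Some(4);
    clear_if_head_matches(&mut hit, 3);
    clear_if_head_matches(&mut miss, 3);
    assert_eq!((hit, miss), (None, Some(4)));
}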

View file

@@ -1,7 +1,7 @@
//! Dealing with trait goals, i.e. `T: Trait<'a, U>`.
use rustc_ast_ir::Movability;
use rustc_data_structures::fx::FxIndexSet;
use rustc_type_ir::data_structures::IndexSet;
use rustc_type_ir::inherent::*;
use rustc_type_ir::lang_items::TraitSolverLangItem;
use rustc_type_ir::visit::TypeVisitableExt as _;
@@ -821,7 +821,7 @@ fn consider_builtin_upcast_to_principal(
// We may upcast to auto traits that are either explicitly listed in
// the object type's bounds, or implied by the principal trait ref's
// supertraits.
let a_auto_traits: FxIndexSet<I::DefId> = a_data
let a_auto_traits: IndexSet<I::DefId> = a_data
.auto_traits()
.into_iter()
.chain(a_data.principal_def_id().into_iter().flat_map(|principal_def_id| {

View file

@@ -7,6 +7,7 @@ edition = "2021"
# tidy-alphabetical-start
bitflags = "2.4.1"
derivative = "2.2.0"
indexmap = "2.0.0"
rustc_ast_ir = { path = "../rustc_ast_ir", default-features = false }
rustc_data_structures = { path = "../rustc_data_structures", optional = true }
rustc_index = { path = "../rustc_index", default-features = false }

View file

@@ -2,18 +2,28 @@
mod impl_ {
pub use rustc_data_structures::fx::FxHashMap as HashMap;
pub use rustc_data_structures::fx::FxHashSet as HashSet;
pub use rustc_data_structures::sso::SsoHashMap as SsoHashMap;
pub use rustc_data_structures::sso::SsoHashSet as SsoHashSet;
pub use rustc_data_structures::fx::FxIndexMap as IndexMap;
pub use rustc_data_structures::fx::FxIndexSet as IndexSet;
pub use rustc_data_structures::sso::SsoHashMap;
pub use rustc_data_structures::sso::SsoHashSet;
pub use rustc_data_structures::stack::ensure_sufficient_stack;
pub use rustc_data_structures::sync::Lrc;
}
#[cfg(not(feature = "nightly"))]
mod impl_ {
pub use indexmap::IndexMap;
pub use indexmap::IndexSet;
pub use std::collections::HashMap;
pub use std::collections::HashSet;
pub use std::collections::HashMap as SsoHashMap;
pub use std::collections::HashSet;
pub use std::collections::HashSet as SsoHashSet;
pub use std::sync::Arc as Lrc;
#[inline]
pub fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
f()
}
}
pub use impl_::*;
pub use impl_::*;
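Everything the solver code now needs from `rustc_data_structures` flows through these re-exports, so callers only ever name `HashMap`, `IndexSet`, `ensure_sufficient_stack`, and friends, never the concrete backing types. An illustrative consumer written against the stable branch only (std plus the newly added `indexmap` dependency); the caller function is a sketch, not taken from the diff:

use indexmap::IndexSet;

// Stable stand-in for rustc_data_structures::stack::ensure_sufficient_stack:
// no stack growth, just run the closure, exactly as in the fallback above.
fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
    f()
}

// Insertion-ordered dedup behaves the same whether IndexSet is indexmap's
// type (stable) or the FxIndexSet re-export (nightly).
fn dedup_preserving_order(ids: &[u32]) -> IndexSet<u32> {
    ensure_sufficient_stack(|| ids.iter().copied().collect())
}

The nightly branch keeps the rustc types, so compiler builds lose nothing; the stable branch trades Fx hashing and guaranteed stack growth for plain std and indexmap behavior.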