Auto merge of #90408 - pierwill:untrack-localdefid-90317, r=cjgillot

Remove `PartialOrd`, `Ord` from `LocalDefId`

Part of work on https://github.com/rust-lang/rust/issues/90317.
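A minimal sketch of the pattern applied throughout the diff below (the `MyId` newtype is hypothetical, standing in for `LocalDefId`): the derived ordering is dropped, and call sites that need a deterministic order sort by an explicit numeric index instead.

```rust
// Hypothetical `MyId` stands in for `LocalDefId`; not the compiler's type.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct MyId {
    index: u32,
}

impl MyId {
    fn index(self) -> usize {
        self.index as usize
    }
}

fn main() {
    let mut ids = vec![MyId { index: 2 }, MyId { index: 0 }, MyId { index: 1 }];
    // `ids.sort()` no longer compiles once the type is not `Ord`;
    // call sites must state explicitly what they sort by.
    ids.sort_by_key(|id| id.index());
    assert_eq!(ids[0].index(), 0);
}
```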
Commit e98309298d, by bors, 2021-12-22 22:33:11 +00:00
13 changed files with 87 additions and 31 deletions

View file

@@ -9,6 +9,7 @@
use crate::graph::vec_graph::VecGraph;
use crate::graph::{DirectedGraph, GraphSuccessors, WithNumEdges, WithNumNodes, WithSuccessors};
use rustc_index::vec::{Idx, IndexVec};
+use std::cmp::Ord;
use std::ops::Range;
#[cfg(test)]
@@ -38,7 +39,7 @@ struct SccData<S: Idx> {
all_successors: Vec<S>,
}
-impl<N: Idx, S: Idx> Sccs<N, S> {
+impl<N: Idx, S: Idx + Ord> Sccs<N, S> {
pub fn new(graph: &(impl DirectedGraph<Node = N> + WithNumNodes + WithSuccessors)) -> Self {
SccsConstruction::construct(graph)
}
@@ -85,7 +86,7 @@ impl<N: Idx, S: Idx> DirectedGraph for Sccs<N, S> {
type Node = S;
}
-impl<N: Idx, S: Idx> WithNumNodes for Sccs<N, S> {
+impl<N: Idx, S: Idx + Ord> WithNumNodes for Sccs<N, S> {
fn num_nodes(&self) -> usize {
self.num_sccs()
}
@@ -103,7 +104,7 @@ impl<'graph, N: Idx, S: Idx> GraphSuccessors<'graph> for Sccs<N, S> {
type Iter = std::iter::Cloned<std::slice::Iter<'graph, S>>;
}
-impl<N: Idx, S: Idx> WithSuccessors for Sccs<N, S> {
+impl<N: Idx, S: Idx + Ord> WithSuccessors for Sccs<N, S> {
fn successors(&self, node: S) -> <Self as GraphSuccessors<'_>>::Iter {
self.successors(node).iter().cloned()
}

View file

@@ -1,3 +1,5 @@
+use std::cmp::Ord;
use crate::graph::{DirectedGraph, GraphSuccessors, WithNumEdges, WithNumNodes, WithSuccessors};
use rustc_index::vec::{Idx, IndexVec};
@@ -17,7 +19,7 @@ pub struct VecGraph<N: Idx> {
edge_targets: Vec<N>,
}
-impl<N: Idx> VecGraph<N> {
+impl<N: Idx + Ord> VecGraph<N> {
pub fn new(num_nodes: usize, mut edge_pairs: Vec<(N, N)>) -> Self {
// Sort the edges by the source -- this is important.
edge_pairs.sort();
@@ -100,7 +102,7 @@ impl<'graph, N: Idx> GraphSuccessors<'graph> for VecGraph<N> {
type Iter = std::iter::Cloned<std::slice::Iter<'graph, N>>;
}
-impl<N: Idx> WithSuccessors for VecGraph<N> {
+impl<N: Idx + Ord> WithSuccessors for VecGraph<N> {
fn successors(&self, node: N) -> <Self as GraphSuccessors<'_>>::Iter {
self.successors(node).iter().cloned()
}

View file

@@ -101,7 +101,11 @@ pub fn enumerated_keys_and_path_hashes(
pub struct Definitions {
table: DefPathTable,
-// FIXME(eddyb) ideally all `LocalDefId`s would be HIR owners.
+/// Only [`LocalDefId`]s for items and item-like are HIR owners.
+/// The associated `HirId` has a `local_id` of `0`.
+/// Generic parameters and closures are also assigned a `LocalDefId` but are not HIR owners.
+/// Their `HirId`s are defined by their position while lowering the enclosing owner.
+// FIXME(cjgillot) Some `LocalDefId`s from `use` items are dropped during lowering and lack a `HirId`.
pub(super) def_id_to_hir_id: IndexVec<LocalDefId, Option<hir::HirId>>,
/// The reverse mapping of `def_id_to_hir_id`.
pub(super) hir_id_to_def_id: FxHashMap<hir::HirId, LocalDefId>,
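A small stand-alone illustration of the owner/non-owner split described in the new doc comment above; the types here are simplified stand-ins, not the compiler's definitions.

```rust
// Simplified stand-ins for illustration only.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct LocalDefId(u32);

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct HirId {
    owner: LocalDefId,
    local_id: u32,
}

fn main() {
    // An item-like definition is a HIR owner: its `HirId` has `local_id == 0`.
    let item = LocalDefId(7);
    let item_hir = HirId { owner: item, local_id: 0 };

    // A closure nested in that item also gets its own `LocalDefId` (not shown),
    // but it is not an owner: its `HirId` is positional, relative to the owner.
    let closure_hir = HirId { owner: item, local_id: 3 };

    assert_eq!(item_hir.local_id, 0);
    assert_ne!(closure_hir.local_id, 0);
}
```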

View file

@@ -1203,7 +1203,7 @@ pub enum UnsafeSource {
UserProvided,
}
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Encodable, Hash, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, Encodable, Hash, Debug)]
pub struct BodyId {
pub hir_id: HirId,
}
@@ -1980,7 +1980,7 @@ pub struct FnSig<'hir> {
// The bodies for items are stored "out of line", in a separate
// hashmap in the `Crate`. Here we just record the hir-id of the item
// so it can fetched later.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Encodable, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, Encodable, Debug)]
pub struct TraitItemId {
pub def_id: LocalDefId,
}
@@ -2043,7 +2043,7 @@ pub enum TraitItemKind<'hir> {
// The bodies for items are stored "out of line", in a separate
// hashmap in the `Crate`. Here we just record the hir-id of the item
// so it can fetched later.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Encodable, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, Encodable, Debug)]
pub struct ImplItemId {
pub def_id: LocalDefId,
}
@@ -2644,7 +2644,7 @@ pub fn ctor_hir_id(&self) -> Option<HirId> {
// The bodies for items are stored "out of line", in a separate
// hashmap in the `Crate`. Here we just record the hir-id of the item
// so it can fetched later.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Encodable, Debug, Hash)]
+#[derive(Copy, Clone, PartialEq, Eq, Encodable, Debug, Hash)]
pub struct ItemId {
pub def_id: LocalDefId,
}
@@ -2883,7 +2883,7 @@ pub enum AssocItemKind {
// The bodies for items are stored "out of line", in a separate
// hashmap in the `Crate`. Here we just record the hir-id of the item
// so it can fetched later.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Encodable, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, Encodable, Debug)]
pub struct ForeignItemId {
pub def_id: LocalDefId,
}
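The comment repeated above describes the storage scheme these ID structs serve: the ID is a small `Copy` key, and the body lives in a separate table. A sketch with hypothetical types (not the compiler's `Crate`):

```rust
use std::collections::HashMap;

// Hypothetical stand-ins: the ID only needs `Eq + Hash` to act as a map key,
// which is why dropping `Ord`/`PartialOrd` from these ID structs is harmless.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct ItemId(u32);

struct Body {
    source: &'static str,
}

fn main() {
    // Bodies are stored "out of line", keyed by the ID.
    let mut bodies: HashMap<ItemId, Body> = HashMap::new();
    bodies.insert(ItemId(0), Body { source: "fn main() {}" });

    let body = bodies.get(&ItemId(0)).expect("body was recorded");
    assert!(body.source.contains("fn main"));
}
```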

View file

@@ -11,7 +11,7 @@
/// the `local_id` part of the `HirId` changing, which is a very useful property in
/// incremental compilation where we have to persist things through changes to
/// the code base.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[derive(Encodable, Decodable)]
pub struct HirId {
pub owner: LocalDefId,
@@ -32,6 +32,10 @@ pub fn as_owner(self) -> Option<LocalDefId> {
pub fn make_owner(owner: LocalDefId) -> Self {
Self { owner, local_id: ItemLocalId::from_u32(0) }
}
+pub fn index(self) -> (usize, usize) {
+(rustc_index::vec::Idx::index(self.owner), rustc_index::vec::Idx::index(self.local_id))
+}
}
impl fmt::Display for HirId {
@@ -40,6 +44,18 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
}
}
+impl Ord for HirId {
+fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+(self.index()).cmp(&(other.index()))
+}
+}
+impl PartialOrd for HirId {
+fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+Some(self.cmp(&other))
+}
+}
rustc_data_structures::define_id_collections!(HirIdMap, HirIdSet, HirId);
rustc_data_structures::define_id_collections!(ItemLocalMap, ItemLocalSet, ItemLocalId);
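A self-contained mirror of the ordering added above, with the two ID parts reduced to plain integers so the example stands alone; it shows that the explicit impl sorts by `(owner index, local index)` pairs, and is not the real compiler type.

```rust
use std::cmp::Ordering;

// Simplified mirror of `HirId` for illustration.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct HirId {
    owner: u32,
    local_id: u32,
}

impl HirId {
    fn index(self) -> (usize, usize) {
        (self.owner as usize, self.local_id as usize)
    }
}

impl Ord for HirId {
    fn cmp(&self, other: &Self) -> Ordering {
        // Compare by (owner index, local index), as in the impl above.
        self.index().cmp(&other.index())
    }
}

impl PartialOrd for HirId {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let mut ids = vec![
        HirId { owner: 1, local_id: 2 },
        HirId { owner: 0, local_id: 5 },
        HirId { owner: 1, local_id: 0 },
    ];
    ids.sort();
    assert_eq!(ids[0], HirId { owner: 0, local_id: 5 });
    assert_eq!(ids[1], HirId { owner: 1, local_id: 0 });
}
```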

View file

@@ -675,7 +675,7 @@ fn contains(&self, elem: T) -> bool {
fn insert(&mut self, elem: T) -> bool {
assert!(elem.index() < self.domain_size);
-let changed = if let Some(i) = self.elems.iter().position(|&e| e >= elem) {
+let changed = if let Some(i) = self.elems.iter().position(|&e| e.index() >= elem.index()) {
if self.elems[i] == elem {
// `elem` is already in the set.
false
@@ -715,6 +715,10 @@ fn iter(&self) -> slice::Iter<'_, T> {
self.elems.iter()
}
+bit_relations_inherent_impls! {}
+}
+impl<T: Idx + Ord> SparseBitSet<T> {
fn last_set_in(&self, range: impl RangeBounds<T>) -> Option<T> {
let mut last_leq = None;
for e in self.iter() {
@@ -724,8 +728,6 @@ fn last_set_in(&self, range: impl RangeBounds<T>) -> Option<T> {
}
last_leq
}
-bit_relations_inherent_impls! {}
}
/// A fixed-size bitset type with a hybrid representation: sparse when there
@@ -802,7 +804,10 @@ pub fn is_empty(&self) -> bool {
/// Returns the previous element present in the bitset from `elem`,
/// inclusively of elem. That is, will return `Some(elem)` if elem is in the
/// bitset.
-pub fn last_set_in(&self, range: impl RangeBounds<T>) -> Option<T> {
+pub fn last_set_in(&self, range: impl RangeBounds<T>) -> Option<T>
+where
+T: Ord,
+{
match self {
HybridBitSet::Sparse(sparse) => sparse.last_set_in(range),
HybridBitSet::Dense(dense) => dense.last_set_in(range),
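A stand-alone sketch of the sparse-path logic for `last_set_in` shown above: scan the small, sorted element list and keep the last element that falls inside the requested range. The `SparseSet` type here is illustrative, not the `rustc_index` one.

```rust
use std::ops::RangeBounds;

// Illustrative only: a tiny "sparse set" holding a sorted list of elements.
struct SparseSet {
    elems: Vec<u32>, // kept sorted ascending
}

impl SparseSet {
    // Last element contained in `range`, if any. Because `elems` is sorted,
    // the last match seen while scanning forward is the answer.
    fn last_set_in(&self, range: impl RangeBounds<u32>) -> Option<u32> {
        let mut last = None;
        for &e in &self.elems {
            if range.contains(&e) {
                last = Some(e);
            }
        }
        last
    }
}

fn main() {
    let set = SparseSet { elems: vec![1, 4, 9, 16] };
    assert_eq!(set.last_set_in(0..=9), Some(9));
    assert_eq!(set.last_set_in(10..16), None);
    assert_eq!(set.last_set_in(..), Some(16));
}
```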

View file

@@ -12,7 +12,7 @@
/// Represents some newtyped `usize` wrapper.
///
/// Purpose: avoid mixing indexes for different bitvector domains.
-pub trait Idx: Copy + 'static + Ord + Debug + Hash {
+pub trait Idx: Copy + 'static + Eq + PartialEq + Debug + Hash {
fn new(idx: usize) -> Self;
fn index(self) -> usize;
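After this change an index newtype only has to provide equality and hashing; generic code that still needs ordering asks for `Ord` explicitly, as the graph impls earlier in this diff now do. A sketch against a hypothetical mirror of the trait (not the real `rustc_index` definition):

```rust
use std::fmt::Debug;
use std::hash::Hash;

// Hypothetical mirror of the trait after this diff: `Ord` is no longer a supertrait.
trait Idx: Copy + 'static + Eq + Debug + Hash {
    fn new(idx: usize) -> Self;
    fn index(self) -> usize;
}

// A newtyped index for one particular domain.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct BlockIdx(u32);

impl Idx for BlockIdx {
    fn new(idx: usize) -> Self {
        BlockIdx(idx as u32)
    }
    fn index(self) -> usize {
        self.0 as usize
    }
}

fn main() {
    let mut blocks: Vec<BlockIdx> = (0..4).rev().map(BlockIdx::new).collect();
    // `blocks.sort()` would need `BlockIdx: Ord`; sorting by the raw index
    // works with just the trait methods.
    blocks.sort_by_key(|b| b.index());
    assert_eq!(blocks[0].index(), 0);
}
```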

View file

@@ -1306,7 +1306,7 @@ fn encode_mir(&mut self) {
})
.collect::<Vec<_>>();
// Sort everything to ensure a stable order for diagnotics.
-keys_and_jobs.sort_by_key(|&(def_id, _, _)| def_id);
+keys_and_jobs.sort_by_key(|&(def_id, _, _)| def_id.index());
for (def_id, encode_const, encode_opt) in keys_and_jobs.into_iter() {
debug_assert!(encode_const || encode_opt);

View file

@@ -6,7 +6,7 @@
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
-use rustc_hir::{HirId, ItemId};
+use rustc_hir::ItemId;
use rustc_query_system::ich::{NodeIdHashingMode, StableHashingContext};
use rustc_session::config::OptLevel;
use rustc_span::source_map::Span;
@@ -355,7 +355,7 @@ pub fn items_in_deterministic_order(
// The codegen tests rely on items being process in the same order as
// they appear in the file, so for local items, we sort by node_id first
#[derive(PartialEq, Eq, PartialOrd, Ord)]
-pub struct ItemSortKey<'tcx>(Option<HirId>, SymbolName<'tcx>);
+pub struct ItemSortKey<'tcx>(Option<usize>, SymbolName<'tcx>);
fn item_sort_key<'tcx>(tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>) -> ItemSortKey<'tcx> {
ItemSortKey(
@@ -366,10 +366,7 @@ fn item_sort_key<'tcx>(tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>) -> ItemSortKey<'
// instances into account. The others don't matter for
// the codegen tests and can even make item order
// unstable.
-InstanceDef::Item(def) => def
-.did
-.as_local()
-.map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id)),
+InstanceDef::Item(def) => Some(def.did.index.as_usize()),
InstanceDef::VtableShim(..)
| InstanceDef::ReifyShim(..)
| InstanceDef::Intrinsic(..)
@@ -380,10 +377,10 @@ fn item_sort_key<'tcx>(tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>) -> ItemSortKey<'
| InstanceDef::CloneShim(..) => None,
}
}
-MonoItem::Static(def_id) => {
-def_id.as_local().map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id))
-}
-MonoItem::GlobalAsm(item_id) => Some(item_id.hir_id()),
+MonoItem::Static(def_id) => Some(def_id.index.as_usize()),
+MonoItem::GlobalAsm(item_id) => {
+Some(item_id.def_id.to_def_id().index.as_usize())
+}
},
item.symbol_name(tcx),
)

View file

@@ -20,7 +20,7 @@
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, TyEncodable, TyDecodable)]
pub enum SimplifiedTypeGen<D>
where
-D: Copy + Debug + Ord + Eq,
+D: Copy + Debug + Eq,
{
BoolSimplifiedType,
CharSimplifiedType,

View file

@@ -212,7 +212,8 @@ fn check_mir_is_available(
// a lower `HirId` than the callee. This ensures that the callee will
// not inline us. This trick only works without incremental compilation.
// So don't do it if that is enabled.
-if !self.tcx.dep_graph.is_fully_enabled() && self.hir_id < callee_hir_id {
+if !self.tcx.dep_graph.is_fully_enabled() && self.hir_id.index() < callee_hir_id.index()
+{
return Ok(());
}
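The comment above relies on a symmetry-breaking argument: comparing ids gives every pair of functions a fixed winner, so the "inline the other one first" situation cannot loop. A toy illustration of that asymmetry (hypothetical helper, not the inliner's actual check):

```rust
// Toy model: ids reduced to the `usize` indices the inliner now compares.
fn caller_yields_to_callee(caller_index: usize, callee_index: usize) -> bool {
    // For any two distinct ids exactly one direction holds, so two functions
    // cannot both be waiting on the other to be inlined first.
    caller_index < callee_index
}

fn main() {
    let (a, b) = (3, 7);
    assert!(caller_yields_to_callee(a, b));
    assert!(!caller_yields_to_callee(b, a));
}
```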

View file

@@ -322,7 +322,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// few cases where we know that only DefIds from the local crate are expected
/// and a DefId from a different crate would signify a bug somewhere. This
/// is when LocalDefId comes in handy.
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct LocalDefId {
pub local_def_index: DefIndex,
}
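A sketch of the local/foreign split described in the doc comment above, with simplified stand-in types; the real compiler exposes this narrowing as `DefId::as_local()`, which other hunks in this diff call.

```rust
// Simplified stand-ins for illustration only.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct CrateNum(u32);

const LOCAL_CRATE: CrateNum = CrateNum(0);

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct DefId {
    krate: CrateNum,
    index: u32,
}

// Only definitions from the crate currently being compiled get this type.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct LocalDefId {
    local_def_index: u32,
}

impl DefId {
    fn as_local(self) -> Option<LocalDefId> {
        if self.krate == LOCAL_CRATE {
            Some(LocalDefId { local_def_index: self.index })
        } else {
            // A foreign `DefId` cannot be narrowed; passing one where a
            // `LocalDefId` is expected would signify a bug.
            None
        }
    }
}

fn main() {
    let local = DefId { krate: LOCAL_CRATE, index: 5 };
    let foreign = DefId { krate: CrateNum(3), index: 5 };
    assert!(local.as_local().is_some());
    assert!(foreign.as_local().is_none());
}
```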

View file

@@ -424,7 +424,7 @@ pub fn inline_asm_source_code(src: &str) -> FileName {
/// `SpanData` is public because `Span` uses a thread-local interner and can't be
/// sent to other threads, but some pieces of performance infra run in a separate thread.
/// Using `Span` is generally preferred.
-#[derive(Clone, Copy, Hash, PartialEq, Eq, Ord, PartialOrd)]
+#[derive(Clone, Copy, Hash, PartialEq, Eq)]
pub struct SpanData {
pub lo: BytePos,
pub hi: BytePos,
@@ -434,6 +434,36 @@ pub struct SpanData {
pub parent: Option<LocalDefId>,
}
+// Order spans by position in the file.
+impl Ord for SpanData {
+fn cmp(&self, other: &Self) -> Ordering {
+let SpanData {
+lo: s_lo,
+hi: s_hi,
+ctxt: s_ctxt,
+// `LocalDefId` does not implement `Ord`.
+// The other fields are enough to determine in-file order.
+parent: _,
+} = self;
+let SpanData {
+lo: o_lo,
+hi: o_hi,
+ctxt: o_ctxt,
+// `LocalDefId` does not implement `Ord`.
+// The other fields are enough to determine in-file order.
+parent: _,
+} = other;
+(s_lo, s_hi, s_ctxt).cmp(&(o_lo, o_hi, o_ctxt))
+}
+}
+impl PartialOrd for SpanData {
+fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+Some(self.cmp(other))
+}
+}
impl SpanData {
#[inline]
pub fn span(&self) -> Span {
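A stand-alone mirror of the ordering added to `SpanData` above, with plain integers in place of `BytePos` and `SyntaxContext`; the point is that `parent` stays out of the comparison, which is what lets that field hold a (no longer `Ord`) `LocalDefId`.

```rust
use std::cmp::Ordering;

// Simplified mirror of `SpanData` for illustration.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct SpanData {
    lo: u32,
    hi: u32,
    ctxt: u32,
    parent: Option<u32>,
}

impl Ord for SpanData {
    fn cmp(&self, other: &Self) -> Ordering {
        // Order by position in the file; `parent` deliberately not compared.
        (self.lo, self.hi, self.ctxt).cmp(&(other.lo, other.hi, other.ctxt))
    }
}

impl PartialOrd for SpanData {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let mut spans = vec![
        SpanData { lo: 40, hi: 50, ctxt: 0, parent: None },
        SpanData { lo: 10, hi: 20, ctxt: 0, parent: Some(1) },
    ];
    // Sorting uses the explicit impls: file position decides the order.
    spans.sort();
    assert_eq!(spans[0].lo, 10);
}
```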