Auto merge of #82103 - Dylan-DPC:rollup-5wv8rid, r=Dylan-DPC

Rollup of 11 pull requests

Successful merges:

 - #80523 (#[doc(inline)] sym_generated)
 - #80920 (Visit more targets when validating attributes)
 - #81720 (Updated smallvec version due to RUSTSEC-2021-0003)
 - #81891 ([rustdoc-json] Make `header` a vec of modifiers, and FunctionPointer consistent)
 - #81912 (Implement the precise analysis pass for lint `disjoint_capture_drop_reorder`)
 - #81914 (Fixing bad suggestion for `_` in `const` type when a function #81885)
 - #81919 (BTreeMap: fix internal comments)
 - #81927 (Add a regression test for #32498)
 - #81965 (Fix MIR pretty printer for non-local DefIds)
 - #82029 (Use debug log level for developer oriented logs)
 - #82056 (fix ice (#82032))

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
Committed by bors on 2021-02-14 22:26:21 +00:00
commit 9503ea19ed
64 changed files with 903 additions and 165 deletions


@ -593,7 +593,7 @@ dependencies = [
"rustc-semver",
"semver 0.11.0",
"serde",
"smallvec 1.4.2",
"smallvec 1.6.1",
"syn",
"toml",
"unicode-normalization",
@ -2086,7 +2086,7 @@ checksum = "22bf8d885d073610aee20e7fa205c4341ed32a761dbde96da5fd96301a8d3e82"
dependencies = [
"parking_lot",
"rustc-hash",
"smallvec 1.4.2",
"smallvec 1.6.1",
]
[[package]]
@ -2236,7 +2236,7 @@ dependencies = [
"rustc-workspace-hack",
"rustc_version",
"shell-escape",
"smallvec 1.4.2",
"smallvec 1.6.1",
]
[[package]]
@ -2459,7 +2459,7 @@ dependencies = [
"instant",
"libc",
"redox_syscall",
"smallvec 1.4.2",
"smallvec 1.6.1",
"winapi 0.3.9",
]
@ -3110,7 +3110,7 @@ version = "705.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93575affa286089b92c8208aea4e60fe9fdd251a619a09b566d6e4e2cc123212"
dependencies = [
"smallvec 1.4.2",
"smallvec 1.6.1",
]
[[package]]
@ -3126,7 +3126,7 @@ dependencies = [
"rustc-ap-rustc_macros",
"rustc-ap-rustc_serialize",
"rustc-ap-rustc_span",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -3201,7 +3201,7 @@ dependencies = [
"rustc-hash",
"rustc-rayon",
"rustc-rayon-core",
"smallvec 1.4.2",
"smallvec 1.6.1",
"stable_deref_trait",
"stacker",
"tempfile",
@ -3249,7 +3249,7 @@ dependencies = [
"rustc-ap-rustc_serialize",
"rustc-ap-rustc_session",
"rustc-ap-rustc_span",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -3337,7 +3337,7 @@ dependencies = [
"rustc-ap-rustc_lexer",
"rustc-ap-rustc_session",
"rustc-ap-rustc_span",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
"unicode-normalization",
]
@ -3349,7 +3349,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc232e2a351d8131c8f1386ce372ee22ef7b1b0b897bbf817a8ce4792029a564"
dependencies = [
"indexmap",
"smallvec 1.4.2",
"smallvec 1.6.1",
]
[[package]]
@ -3497,8 +3497,8 @@ dependencies = [
"quote",
"serde",
"serde_json",
"smallvec 0.6.13",
"smallvec 1.4.2",
"smallvec 0.6.14",
"smallvec 1.6.1",
"syn",
"url 2.1.1",
"winapi 0.3.9",
@ -3509,14 +3509,14 @@ name = "rustc_apfloat"
version = "0.0.0"
dependencies = [
"bitflags",
"smallvec 1.4.2",
"smallvec 1.6.1",
]
[[package]]
name = "rustc_arena"
version = "0.0.0"
dependencies = [
"smallvec 1.4.2",
"smallvec 1.6.1",
]
[[package]]
@ -3530,7 +3530,7 @@ dependencies = [
"rustc_macros",
"rustc_serialize",
"rustc_span",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -3548,7 +3548,7 @@ dependencies = [
"rustc_session",
"rustc_span",
"rustc_target",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -3611,7 +3611,7 @@ dependencies = [
"rustc_session",
"rustc_span",
"rustc_target",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -3639,7 +3639,7 @@ dependencies = [
"rustc_session",
"rustc_span",
"rustc_target",
"smallvec 1.4.2",
"smallvec 1.6.1",
"snap",
"tracing",
]
@ -3697,7 +3697,7 @@ dependencies = [
"rustc_index",
"rustc_macros",
"rustc_serialize",
"smallvec 1.4.2",
"smallvec 1.6.1",
"stable_deref_trait",
"stacker",
"tempfile",
@ -3778,7 +3778,7 @@ dependencies = [
"rustc_serialize",
"rustc_session",
"rustc_span",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -3810,7 +3810,7 @@ dependencies = [
"rustc_serialize",
"rustc_span",
"rustc_target",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -3868,7 +3868,7 @@ dependencies = [
"rustc_session",
"rustc_span",
"rustc_target",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -3909,7 +3909,7 @@ dependencies = [
"rustc_traits",
"rustc_ty_utils",
"rustc_typeck",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tempfile",
"tracing",
"winapi 0.3.9",
@ -3999,7 +3999,7 @@ dependencies = [
"rustc_session",
"rustc_span",
"rustc_target",
"smallvec 1.4.2",
"smallvec 1.6.1",
"snap",
"stable_deref_trait",
"tracing",
@ -4031,7 +4031,7 @@ dependencies = [
"rustc_span",
"rustc_target",
"rustc_type_ir",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -4062,7 +4062,7 @@ dependencies = [
"rustc_span",
"rustc_target",
"rustc_trait_selection",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -4085,7 +4085,7 @@ dependencies = [
"rustc_span",
"rustc_target",
"rustc_trait_selection",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -4102,7 +4102,7 @@ dependencies = [
"rustc_lexer",
"rustc_session",
"rustc_span",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
"unicode-normalization",
]
@ -4178,7 +4178,7 @@ dependencies = [
"rustc_macros",
"rustc_serialize",
"rustc_span",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -4202,7 +4202,7 @@ dependencies = [
"rustc_middle",
"rustc_session",
"rustc_span",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -4231,7 +4231,7 @@ version = "0.0.0"
dependencies = [
"indexmap",
"rustc_macros",
"smallvec 1.4.2",
"smallvec 1.6.1",
]
[[package]]
@ -4328,7 +4328,7 @@ dependencies = [
"rustc_session",
"rustc_span",
"rustc_target",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -4348,7 +4348,7 @@ dependencies = [
"rustc_middle",
"rustc_span",
"rustc_trait_selection",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -4398,7 +4398,7 @@ dependencies = [
"rustc_span",
"rustc_target",
"rustc_trait_selection",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tracing",
]
@ -4425,7 +4425,7 @@ dependencies = [
"rustdoc-json-types",
"serde",
"serde_json",
"smallvec 1.4.2",
"smallvec 1.6.1",
"tempfile",
]
@ -4759,18 +4759,18 @@ checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
[[package]]
name = "smallvec"
version = "0.6.13"
version = "0.6.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6"
checksum = "b97fcaeba89edba30f044a10c6a3cc39df9c3f17d7cd829dd1446cab35f890e0"
dependencies = [
"maybe-uninit",
]
[[package]]
name = "smallvec"
version = "1.4.2"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbee7696b84bbf3d89a1c2eccff0850e3047ed46bfcd2e92c29a2d074d57e252"
checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e"
[[package]]
name = "snap"
@ -5237,7 +5237,7 @@ dependencies = [
"serde",
"serde_json",
"sharded-slab",
"smallvec 1.4.2",
"smallvec 1.6.1",
"thread_local",
"tracing",
"tracing-core",


@ -6,4 +6,4 @@ edition = "2018"
[dependencies]
bitflags = "1.2.1"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
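
Editorial note on the smallvec bumps in the lockfile above and in the manifests below (#81720): RUSTSEC-2021-0003 describes a buffer overflow in `SmallVec::insert_many`, fixed in smallvec 0.6.14 and 1.6.1, which is why every lockfile entry and manifest requirement is moved to exactly those releases. A minimal sketch of the affected API, assuming a crate that depends on the patched smallvec (the values are illustrative):

```rust
use smallvec::{smallvec, SmallVec};

fn main() {
    // `insert_many` splices an iterator into the vector at an index. In
    // smallvec < 1.6.1 / < 0.6.14 a misbehaving iterator could corrupt
    // memory; 1.6.1 is the patched release pinned by this rollup.
    let mut v: SmallVec<[u32; 4]> = smallvec![1, 2, 5];
    v.insert_many(2, [3, 4].iter().copied());
    assert_eq!(&v[..], &[1, 2, 3, 4, 5]);
}
```

The existing `"1.0"` requirements would already allow a fixed release after `cargo update`; raising them to `"1.6.1"` makes the patched version the explicit minimum.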


@ -5,4 +5,4 @@ version = "0.0.0"
edition = "2018"
[dependencies]
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }


@ -15,5 +15,5 @@ rustc_data_structures = { path = "../rustc_data_structures" }
rustc_index = { path = "../rustc_index" }
rustc_lexer = { path = "../rustc_lexer" }
rustc_macros = { path = "../rustc_macros" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
bitflags = "1.2.1"


@ -19,4 +19,4 @@ rustc_span = { path = "../rustc_span" }
rustc_errors = { path = "../rustc_errors" }
rustc_session = { path = "../rustc_session" }
rustc_ast = { path = "../rustc_ast" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }


@ -19,7 +19,7 @@ rustc_lexer = { path = "../rustc_lexer" }
rustc_parse = { path = "../rustc_parse" }
rustc_target = { path = "../rustc_target" }
rustc_session = { path = "../rustc_session" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
rustc_ast = { path = "../rustc_ast" }
rustc_expand = { path = "../rustc_expand" }
rustc_span = { path = "../rustc_span" }


@ -29,6 +29,6 @@ rustc_llvm = { path = "../rustc_llvm" }
rustc_session = { path = "../rustc_session" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
rustc_ast = { path = "../rustc_ast" }
rustc_span = { path = "../rustc_span" }


@ -22,7 +22,7 @@ stable_deref_trait = "1.0.0"
rayon = { version = "0.3.0", package = "rustc-rayon" }
rayon-core = { version = "0.3.0", package = "rustc-rayon-core" }
rustc-hash = "1.1.0"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
rustc_index = { path = "../rustc_index", package = "rustc_index" }
bitflags = "1.2.1"
measureme = "9.0.0"


@ -23,5 +23,5 @@ rustc_macros = { path = "../rustc_macros" }
rustc_lexer = { path = "../rustc_lexer" }
rustc_parse = { path = "../rustc_parse" }
rustc_session = { path = "../rustc_session" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
rustc_ast = { path = "../rustc_ast" }


@ -17,4 +17,4 @@ rustc_span = { path = "../rustc_span" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_ast = { path = "../rustc_ast" }
tracing = "0.1"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }


@ -54,6 +54,7 @@ pub enum Target {
ForeignTy,
GenericParam(GenericParamKind),
MacroDef,
Param,
}
impl Display for Target {
@ -96,6 +97,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
GenericParamKind::Const => "const parameter",
},
Target::MacroDef => "macro def",
Target::Param => "function param",
}
)
}


@ -20,5 +20,5 @@ rustc_session = { path = "../rustc_session" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
rustc_ast = { path = "../rustc_ast" }


@ -11,7 +11,7 @@ doctest = false
libc = "0.2"
tracing = "0.1"
rayon = { version = "0.3.0", package = "rustc-rayon" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
rustc_ast = { path = "../rustc_ast" }
rustc_attr = { path = "../rustc_attr" }
rustc_builtin_macros = { path = "../rustc_builtin_macros" }


@ -12,7 +12,7 @@ libc = "0.2"
snap = "1"
tracing = "0.1"
memmap = "0.7"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
rustc_middle = { path = "../rustc_middle" }
rustc_attr = { path = "../rustc_attr" }
rustc_data_structures = { path = "../rustc_data_structures" }


@ -27,7 +27,7 @@ rustc_serialize = { path = "../rustc_serialize" }
rustc_ast = { path = "../rustc_ast" }
rustc_span = { path = "../rustc_span" }
chalk-ir = "0.55.0"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
measureme = "9.0.0"
rustc_session = { path = "../rustc_session" }
rustc_type_ir = { path = "../rustc_type_ir" }


@ -31,7 +31,7 @@ rustc_trait_selection = { path = "../rustc_trait_selection" }
rustc_ast = { path = "../rustc_ast" }
rustc_span = { path = "../rustc_span" }
rustc_apfloat = { path = "../rustc_apfloat" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
[dev-dependencies]
coverage_test_macros = { path = "src/transform/coverage/test_macros" }


@ -1,6 +1,7 @@
use rustc_hir as hir;
use rustc_hir::Node;
use rustc_index::vec::Idx;
use rustc_middle::hir::map::Map;
use rustc_middle::mir::{Mutability, Place, PlaceRef, ProjectionElem};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::{
@ -543,13 +544,24 @@ fn show_mutating_upvar(
// Attempt to search similar mutable associated items for suggestion.
// In the future, attempt in all paths but initially for RHS of for_loop
fn suggest_similar_mut_method_for_for_loop(&self, err: &mut DiagnosticBuilder<'_>) {
let hir = self.infcx.tcx.hir();
let node = hir.item(self.mir_hir_id());
use hir::{
Expr,
BodyId, Expr,
ExprKind::{Block, Call, DropTemps, Match, MethodCall},
HirId, ImplItem, ImplItemKind, Item, ItemKind,
};
if let hir::ItemKind::Fn(_, _, body_id) = node.kind {
fn maybe_body_id_of_fn(hir_map: &Map<'tcx>, id: HirId) -> Option<BodyId> {
match hir_map.find(id) {
Some(Node::Item(Item { kind: ItemKind::Fn(_, _, body_id), .. }))
| Some(Node::ImplItem(ImplItem { kind: ImplItemKind::Fn(_, body_id), .. })) => {
Some(*body_id)
}
_ => None,
}
}
let hir_map = self.infcx.tcx.hir();
let mir_body_hir_id = self.mir_hir_id();
if let Some(fn_body_id) = maybe_body_id_of_fn(&hir_map, mir_body_hir_id) {
if let Block(
hir::Block {
expr:
@ -579,7 +591,7 @@ fn suggest_similar_mut_method_for_for_loop(&self, err: &mut DiagnosticBuilder<'_
..
},
_,
) = hir.body(body_id).value.kind
) = hir_map.body(fn_body_id).value.kind
{
let opt_suggestions = path_segment
.hir_id


@ -159,7 +159,7 @@ fn process_blocks(&mut self, caller_body: &mut Body<'tcx>, blocks: Range<BasicBl
}
}
#[instrument(skip(self, caller_body))]
#[instrument(level = "debug", skip(self, caller_body))]
fn is_mir_available(&self, callee: Instance<'tcx>, caller_body: &Body<'tcx>) -> bool {
match callee.def {
InstanceDef::Item(_) => {
@ -258,7 +258,7 @@ fn get_valid_function_call(
None
}
#[instrument(skip(self, callee_body))]
#[instrument(level = "debug", skip(self, callee_body))]
fn should_inline(&self, callsite: CallSite<'tcx>, callee_body: &Body<'tcx>) -> bool {
let tcx = self.tcx;
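
Editorial aside on the logging changes from #82029 in the inlining hunks above and in several later files: `tracing`'s `#[instrument]` attribute creates its span at INFO unless a level is given, so adding `level = "debug"` (and demoting `info!` calls to `debug!` elsewhere in this rollup) keeps these developer-oriented traces out of INFO-level output. A small self-contained sketch; the function body and subscriber setup are made up for illustration and are not from rustc:

```rust
use tracing::{debug, instrument, Level};

// Without `level = "debug"` this span would be recorded at INFO and show up
// in user-facing logs; with it, the span and the event below only appear
// when the subscriber allows DEBUG.
#[instrument(level = "debug", skip(body))]
fn should_inline(name: &str, body: &[u8]) -> bool {
    debug!("considering `{}` ({} bytes)", name, body.len());
    body.len() < 64
}

fn main() {
    tracing_subscriber::fmt().with_max_level(Level::DEBUG).init();
    should_inline("demo", &[0u8; 16]);
}
```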


@ -7,7 +7,7 @@
// FIXME: check whether it is cheaper to precompute the entire call graph instead of invoking
// this query ridiculously often.
#[instrument(skip(tcx, root, target))]
#[instrument(level = "debug", skip(tcx, root, target))]
crate fn mir_callgraph_reachable(
tcx: TyCtxt<'tcx>,
(root, target): (ty::Instance<'tcx>, LocalDefId),
@ -27,7 +27,10 @@
!tcx.is_constructor(root.def_id()),
"you should not call `mir_callgraph_reachable` on enum/struct constructor functions"
);
#[instrument(skip(tcx, param_env, target, stack, seen, recursion_limiter, caller))]
#[instrument(
level = "debug",
skip(tcx, param_env, target, stack, seen, recursion_limiter, caller)
)]
fn process(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,


@ -289,19 +289,19 @@ pub fn write_mir_pretty<'tcx>(
}
Ok(())
};
match tcx.hir().body_const_context(def_id.expect_local()) {
None => render_body(w, tcx.optimized_mir(def_id))?,
// For `const fn` we want to render the optimized MIR. If you want the mir used in
// ctfe, you can dump the MIR after the `Deaggregator` optimization pass.
Some(rustc_hir::ConstContext::ConstFn) => {
render_body(w, tcx.optimized_mir(def_id))?;
writeln!(w)?;
writeln!(w, "// MIR FOR CTFE")?;
// Do not use `render_body`, as that would render the promoteds again, but these
// are shared between mir_for_ctfe and optimized_mir
write_mir_fn(tcx, tcx.mir_for_ctfe(def_id), &mut |_, _| Ok(()), w)?;
}
Some(_) => render_body(w, tcx.mir_for_ctfe(def_id))?,
// For `const fn` we want to render both the optimized MIR and the MIR for ctfe.
if tcx.is_const_fn_raw(def_id) {
render_body(w, tcx.optimized_mir(def_id))?;
writeln!(w)?;
writeln!(w, "// MIR FOR CTFE")?;
// Do not use `render_body`, as that would render the promoteds again, but these
// are shared between mir_for_ctfe and optimized_mir
write_mir_fn(tcx, tcx.mir_for_ctfe(def_id), &mut |_, _| Ok(()), w)?;
} else {
let instance_mir =
tcx.instance_mir(ty::InstanceDef::Item(ty::WithOptConstParam::unknown(def_id)));
render_body(w, instance_mir)?;
}
}
Ok(())


@ -24,4 +24,4 @@ rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
rustc_ast = { path = "../rustc_ast" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }


@ -1079,7 +1079,10 @@ fn apply_constructor<'p>(
/// `is_under_guard` is used to inform if the pattern has a guard. If it
/// has one it must not be inserted into the matrix. This shouldn't be
/// relied on for soundness.
#[instrument(skip(cx, matrix, witness_preference, hir_id, is_under_guard, is_top_level))]
#[instrument(
level = "debug",
skip(cx, matrix, witness_preference, hir_id, is_under_guard, is_top_level)
)]
fn is_useful<'p, 'tcx>(
cx: &MatchCheckCtxt<'p, 'tcx>,
matrix: &Matrix<'p, 'tcx>,


@ -19,4 +19,4 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_ast = { path = "../rustc_ast" }
unicode-normalization = "0.1.11"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }


@ -1101,17 +1101,6 @@ fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
intravisit::walk_arm(self, arm);
}
fn visit_macro_def(&mut self, macro_def: &'tcx hir::MacroDef<'tcx>) {
self.check_attributes(
macro_def.hir_id,
&macro_def.attrs,
&macro_def.span,
Target::MacroDef,
None,
);
intravisit::walk_macro_def(self, macro_def);
}
fn visit_foreign_item(&mut self, f_item: &'tcx ForeignItem<'tcx>) {
let target = Target::from_foreign_item(f_item);
self.check_attributes(
@ -1157,6 +1146,23 @@ fn visit_variant(
self.check_attributes(variant.id, variant.attrs, &variant.span, Target::Variant, None);
intravisit::walk_variant(self, variant, generics, item_id)
}
fn visit_macro_def(&mut self, macro_def: &'tcx hir::MacroDef<'tcx>) {
self.check_attributes(
macro_def.hir_id,
macro_def.attrs,
&macro_def.span,
Target::MacroDef,
None,
);
intravisit::walk_macro_def(self, macro_def);
}
fn visit_param(&mut self, param: &'tcx hir::Param<'tcx>) {
self.check_attributes(param.hir_id, param.attrs, &param.span, Target::Param, None);
intravisit::walk_param(self, param);
}
}
fn is_c_like_enum(item: &Item<'_>) -> bool {
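
Editorial aside on the `visit_param` hook added above (#80920): attributes on function parameters are now checked against `Target::Param`, so misplaced built-in attributes such as `#[inline]` are reported, as the `attrs-on-params` ui test and its .stderr later in this diff show. A small sketch of what remains accepted, assuming stable Rust; the names are illustrative:

```rust
// Lint-control attributes (allow/deny/forbid/warn), `cfg` and `cfg_attr` are
// the built-in attributes permitted on function parameters.
fn takes_param(#[allow(unused_variables)] unused: u32) {}

// By contrast, `fn bad(#[inline] param: u32) {}` is rejected; see the
// attrs-on-params test added by this rollup for the emitted diagnostics.

fn main() {
    takes_param(42);
}
```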


@ -18,4 +18,4 @@ rustc_index = { path = "../rustc_index" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
parking_lot = "0.11"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }


@ -26,4 +26,4 @@ rustc_index = { path = "../rustc_index" }
rustc_metadata = { path = "../rustc_metadata" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }


@ -6,7 +6,7 @@ edition = "2018"
[dependencies]
indexmap = "1"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
[dev-dependencies]
rustc_macros = { path = "../rustc_macros" }


@ -1405,8 +1405,8 @@ fn span_data_to_lines_and_cols(
});
if modified {
info!("Set disambiguator for {:?} (hash {:?})", expn_id, first_hash);
info!("expn_data = {:?}", expn_id.expn_data());
debug!("Set disambiguator for {:?} (hash {:?})", expn_id, first_hash);
debug!("expn_data = {:?}", expn_id.expn_data());
// Verify that the new disambiguator makes the hash unique
#[cfg(debug_assertions)]


@ -1604,6 +1604,7 @@ pub mod sym {
use super::Symbol;
use std::convert::TryInto;
#[doc(inline)]
pub use super::sym_generated::*;
// Used from a macro in `librustc_feature/accepted.rs`


@ -22,4 +22,4 @@ rustc_macros = { path = "../rustc_macros" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }


@ -91,7 +91,7 @@ pub fn codegen_fulfill_obligation<'tcx>(
});
let impl_source = drain_fulfillment_cx_or_panic(&infcx, &mut fulfill_cx, impl_source);
info!("Cache miss: {:?} => {:?}", trait_ref, impl_source);
debug!("Cache miss: {:?} => {:?}", trait_ref, impl_source);
Ok(impl_source)
})
}


@ -647,7 +647,7 @@ fn process_trait_obligation(
ProcessResult::Unchanged
}
Err(selection_err) => {
info!("selecting trait at depth {} yielded Err", obligation.recursion_depth);
debug!("selecting trait at depth {} yielded Err", obligation.recursion_depth);
ProcessResult::Error(CodeSelectionError(selection_err))
}


@ -16,6 +16,6 @@ rustc_span = { path = "../rustc_span" }
chalk-ir = "0.55.0"
chalk-solve = "0.55.0"
chalk-engine = "0.55.0"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
rustc_infer = { path = "../rustc_infer" }
rustc_trait_selection = { path = "../rustc_trait_selection" }


@ -80,7 +80,7 @@ fn dropck_outlives<'tcx>(
let cause = ObligationCause::dummy();
let mut constraints = DtorckConstraint::empty();
while let Some((ty, depth)) = ty_stack.pop() {
info!(
debug!(
"{} kinds, {} overflows, {} ty_stack",
result.kinds.len(),
result.overflows.len(),


@ -20,7 +20,7 @@ rustc_hir = { path = "../rustc_hir" }
rustc_hir_pretty = { path = "../rustc_hir_pretty" }
rustc_target = { path = "../rustc_target" }
rustc_session = { path = "../rustc_session" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
rustc_ast = { path = "../rustc_ast" }
rustc_span = { path = "../rustc_span" }
rustc_index = { path = "../rustc_index" }


@ -2191,12 +2191,14 @@ fn ast_ty_to_ty_inner(&self, ast_ty: &hir::Ty<'_>, borrowed: bool) -> Ty<'tcx> {
}
hir::TyKind::BareFn(ref bf) => {
require_c_abi_if_c_variadic(tcx, &bf.decl, bf.abi, ast_ty.span);
tcx.mk_fn_ptr(self.ty_of_fn(
bf.unsafety,
bf.abi,
&bf.decl,
&hir::Generics::empty(),
None,
Some(ast_ty),
))
}
hir::TyKind::TraitObject(ref bounds, ref lifetime) => {
@ -2336,6 +2338,7 @@ pub fn ty_of_fn(
decl: &hir::FnDecl<'_>,
generics: &hir::Generics<'_>,
ident_span: Option<Span>,
hir_ty: Option<&hir::Ty<'_>>,
) -> ty::PolyFnSig<'tcx> {
debug!("ty_of_fn");
@ -2367,12 +2370,14 @@ pub fn ty_of_fn(
// only want to emit an error complaining about them if infer types (`_`) are not
// allowed. `allow_ty_infer` gates this behavior. We check for the presence of
// `ident_span` to not emit an error twice when we have `fn foo(_: fn() -> _)`.
crate::collect::placeholder_type_error(
tcx,
ident_span.map(|sp| sp.shrink_to_hi()),
&generics.params[..],
visitor.0,
true,
hir_ty,
);
}


@ -502,6 +502,7 @@ fn typeck_with_fallback<'tcx>(
decl,
&hir::Generics::empty(),
None,
None,
)
} else {
tcx.fn_sig(def_id)


@ -40,13 +40,16 @@
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_infer::infer::UpvarRegion;
use rustc_middle::hir::place::{Place, PlaceBase, PlaceWithHirId, ProjectionKind};
use rustc_middle::hir::place::{Place, PlaceBase, PlaceWithHirId, Projection, ProjectionKind};
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeckResults, UpvarSubsts};
use rustc_session::lint;
use rustc_span::sym;
use rustc_span::{MultiSpan, Span, Symbol};
use rustc_index::vec::Idx;
use rustc_target::abi::VariantIdx;
/// Describe the relationship between the paths of two places
/// eg:
/// - `foo` is ancestor of `foo.bar.baz`
@ -535,7 +538,7 @@ fn perform_2229_migration_anaysis(
span: Span,
body: &'tcx hir::Body<'tcx>,
) {
let need_migrations = self.compute_2229_migrations_first_pass(
let need_migrations = self.compute_2229_migrations(
closure_def_id,
span,
capture_clause,
@ -544,9 +547,7 @@ fn perform_2229_migration_anaysis(
);
if !need_migrations.is_empty() {
let need_migrations_hir_id = need_migrations.iter().map(|m| m.0).collect::<Vec<_>>();
let migrations_text = migration_suggestion_for_2229(self.tcx, &need_migrations_hir_id);
let migrations_text = migration_suggestion_for_2229(self.tcx, &need_migrations);
let local_def_id = closure_def_id.expect_local();
let closure_hir_id = self.tcx.hir().local_def_id_to_hir_id(local_def_id);
@ -573,15 +574,15 @@ fn perform_2229_migration_anaysis(
/// - It would have been moved into the closure when `capture_disjoint_fields` wasn't
/// enabled, **and**
/// - It wasn't completely captured by the closure, **and**
/// - The type of the root variable needs Drop.
fn compute_2229_migrations_first_pass(
/// - One of the paths starting at this root variable, that is not captured needs Drop.
fn compute_2229_migrations(
&self,
closure_def_id: DefId,
closure_span: Span,
closure_clause: hir::CaptureBy,
body: &'tcx hir::Body<'tcx>,
min_captures: Option<&ty::RootVariableMinCaptureList<'tcx>>,
) -> Vec<(hir::HirId, Ty<'tcx>)> {
) -> Vec<hir::HirId> {
fn resolve_ty<T: TypeFoldable<'tcx>>(
fcx: &FnCtxt<'_, 'tcx>,
span: Span,
@ -617,7 +618,7 @@ fn resolve_ty<T: TypeFoldable<'tcx>>(
match closure_clause {
// Only migrate if closure is a move closure
hir::CaptureBy::Value => need_migrations.push((var_hir_id, ty)),
hir::CaptureBy::Value => need_migrations.push(var_hir_id),
hir::CaptureBy::Ref => {}
}
@ -625,21 +626,277 @@ fn resolve_ty<T: TypeFoldable<'tcx>>(
continue;
};
let is_moved = root_var_min_capture_list
let projections_list = root_var_min_capture_list
.iter()
.any(|capture| matches!(capture.info.capture_kind, ty::UpvarCapture::ByValue(_)));
.filter_map(|captured_place| match captured_place.info.capture_kind {
// Only care about captures that are moved into the closure
ty::UpvarCapture::ByValue(..) => {
Some(captured_place.place.projections.as_slice())
}
ty::UpvarCapture::ByRef(..) => None,
})
.collect::<Vec<_>>();
let is_moved = !projections_list.is_empty();
let is_not_completely_captured =
root_var_min_capture_list.iter().any(|capture| capture.place.projections.len() > 0);
if is_moved && is_not_completely_captured {
need_migrations.push((var_hir_id, ty));
if is_moved
&& is_not_completely_captured
&& self.has_significant_drop_outside_of_captures(
closure_def_id,
closure_span,
ty,
projections_list,
)
{
need_migrations.push(var_hir_id);
}
}
need_migrations
}
/// This is a helper function to `compute_2229_migrations_precise_pass`. Provided the type
/// of a root variable and a list of captured paths starting at this root variable (expressed
/// using list of `Projection` slices), it returns true if there is a path that is not
/// captured starting at this root variable that implements Drop.
///
/// FIXME(project-rfc-2229#35): This should return true only for significant drops.
/// A drop is significant if it's implemented by the user or does
/// anything that will have any observable behavior (other than
/// freeing up memory).
///
/// The way this function works is that, at a given call, it looks at the type `base_path_ty` of
/// some base path, say P, and at the list of projection slices, which represent the different
/// captures moved into the closure starting off of P.
///
/// This will make more sense with an example:
///
/// ```rust
/// #![feature(capture_disjoint_fields)]
///
/// struct FancyInteger(i32); // This implements Drop
///
/// struct Point { x: FancyInteger, y: FancyInteger }
/// struct Color;
///
/// struct Wrapper { p: Point, c: Color }
///
/// fn f(w: Wrapper) {
/// let c = || {
/// // Closure captures w.p.x and w.c by move.
/// };
///
/// c();
/// }
/// ```
///
/// If `capture_disjoint_fields` wasn't enabled the closure would've moved `w` instead of the
/// precise paths. Looking closely, `w.p.y`, which implements Drop, isn't captured;
/// therefore the Drop ordering would change, and we want this function to return true.
///
/// Call stack to figure out if we need to migrate for `w` would look as follows:
///
/// Our initial base path is just `w`, and the paths captured from it are `w[p, x]` and
/// `w[c]`.
/// Notation:
/// - Ty(place): Type of place
/// - `(a, b)`: Represents the function parameters `base_path_ty` and `captured_projs`
/// respectively.
/// ```
/// (Ty(w), [ &[p, x], &[c] ])
/// |
/// ----------------------------
/// | |
/// v v
/// (Ty(w.p), [ &[x] ]) (Ty(w.c), [ &[] ]) // I(1)
/// | |
/// v v
/// (Ty(w.p), [ &[x] ]) false
/// |
/// |
/// -------------------------------
/// | |
/// v v
/// (Ty((w.p).x), [ &[] ]) (Ty((w.p).y), []) // IMP 2
/// | |
/// v v
/// false NeedsDrop(Ty(w.p.y))
/// |
/// v
/// true
/// ```
///
/// IMP 1 `(Ty(w.c), [ &[] ])`: Notice the single empty slice inside `captured_projs`.
/// This implies that the `w.c` is completely captured by the closure.
/// Since drop for this path will be called when the closure is
/// dropped we don't need to migrate for it.
///
/// IMP 2 `(Ty((w.p).y), [])`: Notice that `captured_projs` is empty. This implies that this
/// path wasn't captured by the closure. Also note that even
/// though we didn't capture this path, the function visits it,
/// which is kind of the point of this function. We then return
/// if the type of `w.p.y` implements Drop, which in this case is
/// true.
///
/// Consider another example:
///
/// ```rust
/// struct X;
/// impl Drop for X {}
///
/// struct Y(X);
/// impl Drop for Y {}
///
/// fn foo() {
/// let y = Y(X);
/// let c = || move(y.0);
/// }
/// ```
///
/// Note that `y.0` is captured by the closure. When this function is called for `y`, it will
/// return true, because even though all paths starting at `y` are captured, `y` itself
/// implements Drop which will be affected since `y` isn't completely captured.
fn has_significant_drop_outside_of_captures(
&self,
closure_def_id: DefId,
closure_span: Span,
base_path_ty: Ty<'tcx>,
captured_projs: Vec<&[Projection<'tcx>]>,
) -> bool {
let needs_drop = |ty: Ty<'tcx>| {
ty.needs_drop(self.tcx, self.tcx.param_env(closure_def_id.expect_local()))
};
let is_drop_defined_for_ty = |ty: Ty<'tcx>| {
let drop_trait = self.tcx.require_lang_item(hir::LangItem::Drop, Some(closure_span));
let ty_params = self.tcx.mk_substs_trait(base_path_ty, &[]);
self.tcx.type_implements_trait((
drop_trait,
ty,
ty_params,
self.tcx.param_env(closure_def_id.expect_local()),
))
};
let is_drop_defined_for_ty = is_drop_defined_for_ty(base_path_ty);
// If there is a case where no projection is applied on top of the current place,
// then there must be exactly one capture corresponding to such a case. Note that this
// represents the case of the path being completely captured by the variable.
//
// e.g. If `a.b` is captured and we are processing `a.b`, then we can't have the closure also
// capture `a.b.c`, because that violates min capture.
let is_completely_captured = captured_projs.iter().any(|projs| projs.is_empty());
assert!(!is_completely_captured || (captured_projs.len() == 1));
if is_completely_captured {
// The place is captured entirely, so it doesn't matter if it needs a dtor; it will be dropped
// when the closure is dropped.
return false;
}
if is_drop_defined_for_ty {
// If drop is implemented for this type then we need it to be fully captured,
// which we know it is not because of the previous check. Therefore we need to
// migrate.
return true;
}
if captured_projs.is_empty() {
return needs_drop(base_path_ty);
}
match base_path_ty.kind() {
// Observations:
// - `captured_projs` is not empty. Therefore we can call
// `captured_projs.first().unwrap()` safely.
// - All entries in `captured_projs` have at least one projection.
// Therefore we can call `captured_projs.first().unwrap().first().unwrap()` safely.
// We don't capture derefs in case of move captures, which would have to be applied to
// access any further paths.
ty::Adt(def, _) if def.is_box() => unreachable!(),
ty::Ref(..) => unreachable!(),
ty::RawPtr(..) => unreachable!(),
ty::Adt(def, substs) => {
// Multi-variant enums are captured in their entirety,
// which would've been handled in the case of single empty slice in `captured_projs`.
assert_eq!(def.variants.len(), 1);
// Only Field projections can be applied to a non-box Adt.
assert!(
captured_projs.iter().all(|projs| matches!(
projs.first().unwrap().kind,
ProjectionKind::Field(..)
))
);
def.variants.get(VariantIdx::new(0)).unwrap().fields.iter().enumerate().any(
|(i, field)| {
let paths_using_field = captured_projs
.iter()
.filter_map(|projs| {
if let ProjectionKind::Field(field_idx, _) =
projs.first().unwrap().kind
{
if (field_idx as usize) == i { Some(&projs[1..]) } else { None }
} else {
unreachable!();
}
})
.collect();
let after_field_ty = field.ty(self.tcx, substs);
self.has_significant_drop_outside_of_captures(
closure_def_id,
closure_span,
after_field_ty,
paths_using_field,
)
},
)
}
ty::Tuple(..) => {
// Only Field projections can be applied to a tuple.
assert!(
captured_projs.iter().all(|projs| matches!(
projs.first().unwrap().kind,
ProjectionKind::Field(..)
))
);
base_path_ty.tuple_fields().enumerate().any(|(i, element_ty)| {
let paths_using_field = captured_projs
.iter()
.filter_map(|projs| {
if let ProjectionKind::Field(field_idx, _) = projs.first().unwrap().kind
{
if (field_idx as usize) == i { Some(&projs[1..]) } else { None }
} else {
unreachable!();
}
})
.collect();
self.has_significant_drop_outside_of_captures(
closure_def_id,
closure_span,
element_ty,
paths_using_field,
)
})
}
// Anything else would be completely captured and therefore handled already.
_ => unreachable!(),
}
}
fn init_capture_kind(
&self,
capture_clause: hir::CaptureBy,
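
Editorial aside on the analysis above (#81912): the precise pass decides when RFC 2229's disjoint captures would change observable drop order. A standalone sketch of the hazard, using a made-up `Noisy` type; the comments describe the nightly `capture_disjoint_fields` behavior rather than what this snippet prints on stable:

```rust
struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn make_closure() -> impl FnOnce() {
    let t = (Noisy("t.0"), Noisy("t.1"));
    move || {
        // With whole-variable capture (stable), the entire tuple moves into
        // the closure, so `t.1` is dropped only when the closure itself is
        // consumed in `main`.
        // With `#![feature(capture_disjoint_fields)]`, only `t.0` would be
        // captured, so `t.1` would be dropped as soon as `make_closure`
        // returns. That earlier drop is the reordering that
        // `disjoint_capture_drop_reorder` reports when a dropped path
        // escapes the capture set.
        let _t0 = t.0;
    }
}

fn main() {
    let c = make_closure();
    println!("closure returned, not yet called");
    c();
}
```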


@ -143,6 +143,7 @@ struct CollectItemTypesVisitor<'tcx> {
generics: &[hir::GenericParam<'_>],
placeholder_types: Vec<Span>,
suggest: bool,
hir_ty: Option<&hir::Ty<'_>>,
) {
if placeholder_types.is_empty() {
return;
@ -173,12 +174,40 @@ struct CollectItemTypesVisitor<'tcx> {
}
let mut err = bad_placeholder_type(tcx, placeholder_types);
// Suggest, but only if it is not a function in const or static
if suggest {
err.multipart_suggestion(
"use type parameters instead",
sugg,
Applicability::HasPlaceholders,
);
let mut is_fn = false;
let mut is_const = false;
let mut is_static = false;
if let Some(hir_ty) = hir_ty {
if let hir::TyKind::BareFn(_) = hir_ty.kind {
is_fn = true;
// Check if parent is const or static
let parent_id = tcx.hir().get_parent_node(hir_ty.hir_id);
let parent_node = tcx.hir().get(parent_id);
if let hir::Node::Item(item) = parent_node {
if let hir::ItemKind::Const(_, _) = item.kind {
is_const = true;
} else if let hir::ItemKind::Static(_, _, _) = item.kind {
is_static = true;
}
}
}
}
// If the function type appears inside a const or static item,
// don't show the suggestion.
if !(is_fn && (is_const || is_static)) {
err.multipart_suggestion(
"use type parameters instead",
sugg,
Applicability::HasPlaceholders,
);
}
}
err.emit();
}
@ -200,7 +229,14 @@ fn reject_placeholder_type_signatures_in_item(tcx: TyCtxt<'tcx>, item: &'tcx hir
let mut visitor = PlaceholderHirTyCollector::default();
visitor.visit_item(item);
placeholder_type_error(tcx, Some(generics.span), &generics.params[..], visitor.0, suggest);
placeholder_type_error(
tcx,
Some(generics.span),
&generics.params[..],
visitor.0,
suggest,
None,
);
}
impl Visitor<'tcx> for CollectItemTypesVisitor<'tcx> {
@ -682,6 +718,7 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::HirId) {
let it = tcx.hir().expect_item(item_id);
debug!("convert: item {} with id {}", it.ident, it.hir_id);
let def_id = tcx.hir().local_def_id(item_id);
match it.kind {
// These don't define types.
hir::ItemKind::ExternCrate(_)
@ -787,7 +824,7 @@ fn convert_trait_item(tcx: TyCtxt<'_>, trait_item_id: hir::HirId) {
// Account for `const C: _;`.
let mut visitor = PlaceholderHirTyCollector::default();
visitor.visit_trait_item(trait_item);
placeholder_type_error(tcx, None, &[], visitor.0, false);
placeholder_type_error(tcx, None, &[], visitor.0, false, None);
}
hir::TraitItemKind::Type(_, Some(_)) => {
@ -796,7 +833,7 @@ fn convert_trait_item(tcx: TyCtxt<'_>, trait_item_id: hir::HirId) {
// Account for `type T = _;`.
let mut visitor = PlaceholderHirTyCollector::default();
visitor.visit_trait_item(trait_item);
placeholder_type_error(tcx, None, &[], visitor.0, false);
placeholder_type_error(tcx, None, &[], visitor.0, false, None);
}
hir::TraitItemKind::Type(_, None) => {
@ -805,7 +842,8 @@ fn convert_trait_item(tcx: TyCtxt<'_>, trait_item_id: hir::HirId) {
// even if there is no concrete type.
let mut visitor = PlaceholderHirTyCollector::default();
visitor.visit_trait_item(trait_item);
placeholder_type_error(tcx, None, &[], visitor.0, false);
placeholder_type_error(tcx, None, &[], visitor.0, false, None);
}
};
@ -826,7 +864,8 @@ fn convert_impl_item(tcx: TyCtxt<'_>, impl_item_id: hir::HirId) {
// Account for `type T = _;`
let mut visitor = PlaceholderHirTyCollector::default();
visitor.visit_impl_item(impl_item);
placeholder_type_error(tcx, None, &[], visitor.0, false);
placeholder_type_error(tcx, None, &[], visitor.0, false, None);
}
hir::ImplItemKind::Const(..) => {}
}
@ -1654,6 +1693,7 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: DefId) -> ty::PolyFnSig<'_> {
&sig.decl,
&generics,
Some(ident.span),
None,
),
}
}
@ -1663,9 +1703,15 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: DefId) -> ty::PolyFnSig<'_> {
ident,
generics,
..
}) => {
AstConv::ty_of_fn(&icx, header.unsafety, header.abi, decl, &generics, Some(ident.span))
}
}) => AstConv::ty_of_fn(
&icx,
header.unsafety,
header.abi,
decl,
&generics,
Some(ident.span),
None,
),
ForeignItem(&hir::ForeignItem {
kind: ForeignItemKind::Fn(ref fn_decl, _, _),
@ -2335,6 +2381,7 @@ fn compute_sig_of_foreign_fn_decl<'tcx>(
decl,
&hir::Generics::empty(),
Some(ident.span),
None,
);
// Feature gate SIMD types in FFI, since I am not sure that the
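
Editorial aside on the suggestion logic added in the hunks above (#81914): for a bare `fn` type inside a `const` or `static`, the old hint `use type parameters instead: T` can never be applied, because those items cannot declare type parameters; the only real fix is to write the concrete type. A small sketch of the working form (names are illustrative):

```rust
// `const FOUR: fn() -> _ = ...` is rejected (E0121) and, after this change,
// no longer gets the impossible "use type parameters instead" suggestion.
// Spelling out the signature is the actual fix:
const FOUR: fn() -> u8 = four;

fn four() -> u8 {
    4
}

fn main() {
    assert_eq!(FOUR(), 4);
}
```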


@ -103,7 +103,8 @@ fn range_search<BorrowType: marker::BorrowType, K, V, Q, R>(
}
}
/// Equivalent to `range_search(k, v, ..)` but without the `Ord` bound.
/// Equivalent to `range_search(root1, root2, ..)` but without the `Ord` bound.
/// Equivalent to `(root1.first_leaf_edge(), root2.last_leaf_edge())` but more efficient.
fn full_range<BorrowType: marker::BorrowType, K, V>(
root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
@ -130,7 +131,7 @@ fn full_range<BorrowType: marker::BorrowType, K, V>(
}
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
/// Creates a pair of leaf edges delimiting a specified range in or underneath a node.
/// Finds the pair of leaf edges delimiting a specific range in a tree.
///
/// The result is meaningful only if the tree is ordered by key, like the tree
/// in a `BTreeMap` is.
@ -149,7 +150,7 @@ pub fn range_search<Q, R>(
range_search(self, self, range)
}
/// Returns (self.first_leaf_edge(), self.last_leaf_edge()), but more efficiently.
/// Finds the pair of leaf edges delimiting an entire tree.
pub fn full_range(
self,
) -> (


@ -181,7 +181,7 @@ pub fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
NodeRef { height: self.height, node: self.node, _marker: PhantomData }
}
/// Irreversibly transistions to a reference that offers traversal,
/// Irreversibly transitions to a reference that permits traversal and offers
/// destructive methods and little else.
pub fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
NodeRef { height: self.height, node: self.node, _marker: PhantomData }


@ -108,7 +108,7 @@ def check_type(ty):
elif ty["kind"] == "function_pointer":
for param in ty["inner"]["generic_params"]:
check_generic_param(param)
check_decl(ty["inner"]["inner"])
check_decl(ty["inner"]["decl"])
elif ty["kind"] == "qualified_path":
check_type(ty["inner"]["self_type"])
check_type(ty["inner"]["trait"])


@ -14,7 +14,7 @@ minifier = "0.0.33"
rayon = { version = "0.3.0", package = "rustc-rayon" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
smallvec = "1.0"
smallvec = "1.6.1"
tempfile = "3"
itertools = "0.9"
regex = "1"


@ -2,6 +2,8 @@
//! the `clean` types but with some fields removed or stringified to simplify the output and not
//! expose unstable compiler internals.
#![allow(rustc::default_hash_types)]
use std::convert::From;
use rustc_ast::ast;
@ -16,6 +18,7 @@
use crate::clean::utils::print_const_expr;
use crate::formats::item_type::ItemType;
use crate::json::JsonRenderer;
use std::collections::HashSet;
impl JsonRenderer<'_> {
pub(super) fn convert_item(&self, item: clean::Item) -> Option<Item> {
@ -225,15 +228,22 @@ fn from(struct_: clean::Union) -> Self {
}
}
fn stringify_header(header: &rustc_hir::FnHeader) -> String {
let mut s = String::from(header.unsafety.prefix_str());
if header.asyncness == rustc_hir::IsAsync::Async {
s.push_str("async ")
crate fn from_fn_header(header: &rustc_hir::FnHeader) -> HashSet<Qualifiers> {
let mut v = HashSet::new();
if let rustc_hir::Unsafety::Unsafe = header.unsafety {
v.insert(Qualifiers::Unsafe);
}
if header.constness == rustc_hir::Constness::Const {
s.push_str("const ")
if let rustc_hir::IsAsync::Async = header.asyncness {
v.insert(Qualifiers::Async);
}
s
if let rustc_hir::Constness::Const = header.constness {
v.insert(Qualifiers::Const);
}
v
}
impl From<clean::Function> for Function {
@ -242,7 +252,7 @@ fn from(function: clean::Function) -> Self {
Function {
decl: decl.into(),
generics: generics.into(),
header: stringify_header(&header),
header: from_fn_header(&header),
abi: header.abi.to_string(),
}
}
@ -364,7 +374,13 @@ impl From<clean::BareFunctionDecl> for FunctionPointer {
fn from(bare_decl: clean::BareFunctionDecl) -> Self {
let clean::BareFunctionDecl { unsafety, generic_params, decl, abi } = bare_decl;
FunctionPointer {
is_unsafe: unsafety == rustc_hir::Unsafety::Unsafe,
header: if let rustc_hir::Unsafety::Unsafe = unsafety {
let mut hs = HashSet::new();
hs.insert(Qualifiers::Unsafe);
hs
} else {
HashSet::new()
},
generic_params: generic_params.into_iter().map(Into::into).collect(),
decl: decl.into(),
abi: abi.to_string(),
@ -439,7 +455,7 @@ fn from(impl_: clean::Impl) -> Self {
Method {
decl: decl.into(),
generics: generics.into(),
header: stringify_header(&header),
header: from_fn_header(&header),
abi: header.abi.to_string(),
has_body,
}


@ -243,7 +243,7 @@ fn after_krate(
)
})
.collect(),
format_version: 3,
format_version: 4,
};
let mut p = self.out_path.clone();
p.push(output.index.get(&output.root).unwrap().name.clone().unwrap());


@ -3,7 +3,7 @@
//! These types are the public API exposed through the `--output-format json` flag. The [`Crate`]
//! struct is the root of the JSON blob and all other items are contained within.
use std::collections::HashMap;
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
@ -281,11 +281,20 @@ pub enum StructType {
Unit,
}
#[non_exhaustive]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum Qualifiers {
Const,
Unsafe,
Async,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct Function {
pub decl: FnDecl,
pub generics: Generics,
pub header: String,
pub header: HashSet<Qualifiers>,
pub abi: String,
}
@ -293,7 +302,7 @@ pub struct Function {
pub struct Method {
pub decl: FnDecl,
pub generics: Generics,
pub header: String,
pub header: HashSet<Qualifiers>,
pub abi: String,
pub has_body: bool,
}
@ -404,9 +413,9 @@ pub enum Type {
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct FunctionPointer {
pub is_unsafe: bool,
pub generic_params: Vec<GenericParamDef>,
pub decl: FnDecl,
pub generic_params: Vec<GenericParamDef>,
pub header: HashSet<Qualifiers>,
pub abi: String,
}
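
Editorial aside on the `header` change above (#81891): instead of a pre-rendered string, `Function`, `Method` and `FunctionPointer` now carry a `HashSet<Qualifiers>`, which serializes as a list of modifiers such as `["const", "unsafe"]` (the new `header.json` ui tests below pin this down). A hedged sketch of the serialization, re-declaring the enum above in a standalone program:

```rust
use serde::{Deserialize, Serialize};
use std::collections::HashSet;

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum Qualifiers {
    Const,
    Unsafe,
    Async,
}

fn main() {
    let mut header = HashSet::new();
    header.insert(Qualifiers::Const);
    header.insert(Qualifiers::Unsafe);
    // Prints a JSON array of lowercase modifier names, e.g. ["const","unsafe"]
    // (element order is unspecified because this is a set).
    println!("{}", serde_json::to_string(&header).unwrap());
}
```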


@ -0,0 +1,5 @@
// @has header.json "$.index[*][?(@.name=='FnPointer')].inner.type.inner.header" "[]"
pub type FnPointer = fn();
// @has - "$.index[*][?(@.name=='UnsafePointer')].inner.type.inner.header" '["unsafe"]'
pub type UnsafePointer = unsafe fn();


@ -0,0 +1,22 @@
// edition:2018
// @has header.json "$.index[*][?(@.name=='nothing_fn')].inner.header" "[]"
pub fn nothing_fn() {}
// @has - "$.index[*][?(@.name=='const_fn')].inner.header" '["const"]'
pub const fn const_fn() {}
// @has - "$.index[*][?(@.name=='async_fn')].inner.header" '["async"]'
pub async fn async_fn() {}
// @count - "$.index[*][?(@.name=='async_unsafe_fn')].inner.header[*]" 2
// @has - "$.index[*][?(@.name=='async_unsafe_fn')].inner.header[*]" '"async"'
// @has - "$.index[*][?(@.name=='async_unsafe_fn')].inner.header[*]" '"unsafe"'
pub async unsafe fn async_unsafe_fn() {}
// @count - "$.index[*][?(@.name=='const_unsafe_fn')].inner.header[*]" 2
// @has - "$.index[*][?(@.name=='const_unsafe_fn')].inner.header[*]" '"const"'
// @has - "$.index[*][?(@.name=='const_unsafe_fn')].inner.header[*]" '"unsafe"'
pub const unsafe fn const_unsafe_fn() {}
// It's impossible for a function to be both const and async, so no test for that


@ -0,0 +1,26 @@
// edition:2018
pub struct Foo;
impl Foo {
// @has header.json "$.index[*][?(@.name=='nothing_meth')].inner.header" "[]"
pub fn nothing_meth() {}
// @has - "$.index[*][?(@.name=='const_meth')].inner.header" '["const"]'
pub const fn const_meth() {}
// @has - "$.index[*][?(@.name=='async_meth')].inner.header" '["async"]'
pub async fn async_meth() {}
// @count - "$.index[*][?(@.name=='async_unsafe_meth')].inner.header[*]" 2
// @has - "$.index[*][?(@.name=='async_unsafe_meth')].inner.header[*]" '"async"'
// @has - "$.index[*][?(@.name=='async_unsafe_meth')].inner.header[*]" '"unsafe"'
pub async unsafe fn async_unsafe_meth() {}
// @count - "$.index[*][?(@.name=='const_unsafe_meth')].inner.header[*]" 2
// @has - "$.index[*][?(@.name=='const_unsafe_meth')].inner.header[*]" '"const"'
// @has - "$.index[*][?(@.name=='const_unsafe_meth')].inner.header[*]" '"unsafe"'
pub const unsafe fn const_unsafe_meth() {}
// It's impossible for a method to be both const and async, so no test for that
}


@ -0,0 +1,8 @@
// This checks that incorrect attributes on function parameters are caught
fn function(#[inline] param: u32) {
//~^ ERROR attribute should be applied to function or closure
//~| ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes
}
fn main() {}


@ -0,0 +1,17 @@
error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
--> $DIR/attrs-on-params.rs:3:13
|
LL | fn function(#[inline] param: u32) {
| ^^^^^^^^^
error[E0518]: attribute should be applied to function or closure
--> $DIR/attrs-on-params.rs:3:13
|
LL | fn function(#[inline] param: u32) {
| ^^^^^^^^^-----------
| |
| not a function or closure
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0518`.


@ -0,0 +1,16 @@
use std::{fs, io::*};
use std::collections::HashMap;
type Handle = BufWriter<fs::File>;
struct Thing(HashMap<String, Handle>);
impl Thing {
pub fn die_horribly(&mut self) {
for v in self.0.values() {
v.flush();
//~^ ERROR cannot borrow
}
}
}
fn main() {}


@ -0,0 +1,14 @@
error[E0596]: cannot borrow `*v` as mutable, as it is behind a `&` reference
--> $DIR/issue-82032.rs:10:13
|
LL | for v in self.0.values() {
| ---------------
| | |
| | help: use mutable method: `values_mut()`
| this iterator yields `&` references
LL | v.flush();
| ^ `v` is a `&` reference, so the data it refers to cannot be borrowed as mutable
error: aborting due to previous error
For more information about this error, try `rustc --explain E0596`.


@ -0,0 +1,78 @@
#![deny(disjoint_capture_drop_reorder)]
//~^ NOTE: the lint level is defined here
#[derive(Debug)]
struct Foo(i32);
impl Drop for Foo {
fn drop(&mut self) {
println!("{:?} dropped", self.0);
}
}
struct ConstainsDropField(Foo, Foo);
#[derive(Debug)]
struct ContainsAndImplsDrop(Foo);
impl Drop for ContainsAndImplsDrop {
fn drop(&mut self) {
println!("{:?} dropped", self.0);
}
}
// Test that even if all paths starting at root variable that implement Drop are captured,
// the lint is triggered if the root variable implements drop and isn't captured.
fn test_precise_analysis_parent_root_impl_drop_not_captured() {
let t = ContainsAndImplsDrop(Foo(10));
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t));
let _t = t.0;
};
c();
}
// Test that lint is triggered if a path that implements Drop is not captured by move
fn test_precise_analysis_drop_paths_not_captured_by_move() {
let t = ConstainsDropField(Foo(10), Foo(20));
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t));
let _t = t.0;
let _t = &t.1;
};
c();
}
struct S;
impl Drop for S {
fn drop(&mut self) {
}
}
struct T(S, S);
struct U(T, T);
// Test precise analysis for the lint works with paths longer than one.
fn test_precise_analysis_long_path_missing() {
let u = U(T(S, S), T(S, S));
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(u));
let _x = u.0.0;
let _x = u.0.1;
let _x = u.1.0;
};
c();
}
fn main() {
test_precise_analysis_parent_root_impl_drop_not_captured();
test_precise_analysis_drop_paths_not_captured_by_move();
test_precise_analysis_long_path_missing();
}


@ -0,0 +1,49 @@
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/precise.rs:27:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | };
| |_____^
|
note: the lint level is defined here
--> $DIR/precise.rs:1:9
|
LL | #![deny(disjoint_capture_drop_reorder)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: drop(&(t));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/precise.rs:40:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | let _t = &t.1;
LL | | };
| |_____^
|
= note: drop(&(t));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/precise.rs:63:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _x = u.0.0;
LL | | let _x = u.0.1;
LL | | let _x = u.1.0;
LL | | };
| |_____^
|
= note: drop(&(u));
error: aborting due to 3 previous errors


@ -0,0 +1,105 @@
// run-pass
#![deny(disjoint_capture_drop_reorder)]
#[derive(Debug)]
struct Foo(i32);
impl Drop for Foo {
fn drop(&mut self) {
println!("{:?} dropped", self.0);
}
}
struct ConstainsDropField(Foo, Foo);
// Test that if all paths starting at root variable that implement Drop are captured
// then it doesn't trigger the lint.
fn test_precise_analysis_simple_1() {
let t = (Foo(10), Foo(20), Foo(30));
let c = || {
let _t = t.0;
let _t = t.1;
let _t = t.2;
};
c();
}
// Test that if all paths starting at root variable that implement Drop are captured
// then it doesn't trigger the lint.
fn test_precise_analysis_simple_2() {
let t = ConstainsDropField(Foo(10), Foo(20));
let c = || {
let _t = t.0;
let _t = t.1;
};
c();
}
#[derive(Debug)]
struct ContainsAndImplsDrop(Foo);
impl Drop for ContainsAndImplsDrop {
fn drop(&mut self) {
println!("{:?} dropped", self.0);
}
}
// If a path isn't directly captured but requires Drop, then this tests that migrations aren't
// needed if a parent of that path is captured.
fn test_precise_analysis_parent_captured_1() {
let t = ConstainsDropField(Foo(10), Foo(20));
let c = || {
let _t = t;
};
c();
}
// If a path isn't directly captured but requires Drop, then this tests that migrations aren't
// needed if a parent of that path is captured.
fn test_precise_analysis_parent_captured_2() {
let t = ContainsAndImplsDrop(Foo(10));
let c = || {
let _t = t;
};
c();
}
struct S;
impl Drop for S {
fn drop(&mut self) {
}
}
struct T(S, S);
struct U(T, T);
// Test that if the path is longer than just one element, precise analysis works correctly.
fn test_precise_analysis_long_path() {
let u = U(T(S, S), T(S, S));
let c = || {
let _x = u.0.0;
let _x = u.0.1;
let _x = u.1.0;
let _x = u.1.1;
};
c();
}
fn main() {
test_precise_analysis_simple_1();
test_precise_analysis_simple_2();
test_precise_analysis_parent_captured_1();
test_precise_analysis_parent_captured_2();
test_precise_analysis_long_path();
}


@ -0,0 +1,16 @@
// run-pass
#![allow(dead_code)]
// Making sure that no overflow occurs.
struct L<T> {
n: Option<T>,
}
type L8<T> = L<L<L<L<L<L<L<L<T>>>>>>>>;
type L64<T> = L8<L8<L8<L8<T>>>>;
fn main() {
use std::mem::size_of;
assert_eq!(size_of::<L64<L64<()>>>(), 1);
assert_eq!(size_of::<L<L64<L64<()>>>>(), 1);
}


@ -2,10 +2,7 @@ error[E0121]: the type placeholder `_` is not allowed within types on item signa
--> $DIR/issue-74086.rs:2:20
|
LL | static BUG: fn(_) -> u8 = |_| 8;
| ^
| |
| not allowed in type signatures
| help: use type parameters instead: `T`
| ^ not allowed in type signatures
error: aborting due to previous error


@ -0,0 +1,10 @@
const TEST4: fn() -> _ = 42;
//~^ ERROR the type placeholder `_` is not allowed within types on item
//signatures
fn main() {
const TEST5: fn() -> _ = 42;
//~^ ERROR the type placeholder `_` is not allowed within types on item
//signatures
}


@ -0,0 +1,15 @@
error[E0121]: the type placeholder `_` is not allowed within types on item signatures
--> $DIR/issue-81885.rs:1:22
|
LL | const TEST4: fn() -> _ = 42;
| ^ not allowed in type signatures
error[E0121]: the type placeholder `_` is not allowed within types on item signatures
--> $DIR/issue-81885.rs:6:26
|
LL | const TEST5: fn() -> _ = 42;
| ^ not allowed in type signatures
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0121`.


@ -3,8 +3,8 @@
#![feature(decl_macro)] //~ ERROR `feature` is ambiguous
extern crate builtin_attrs;
use builtin_attrs::{test, bench};
use builtin_attrs::*;
use builtin_attrs::{bench, test};
#[repr(C)] //~ ERROR `repr` is ambiguous
struct S;


@ -12,7 +12,7 @@ LL | #[repr(C)]
|
= note: `repr` could refer to a built-in attribute
note: `repr` could also refer to the attribute macro imported here
--> $DIR/ambiguous-builtin-attrs.rs:7:5
--> $DIR/ambiguous-builtin-attrs.rs:6:5
|
LL | use builtin_attrs::*;
| ^^^^^^^^^^^^^^^^
@ -26,7 +26,7 @@ LL | #[cfg_attr(all(), repr(C))]
|
= note: `repr` could refer to a built-in attribute
note: `repr` could also refer to the attribute macro imported here
--> $DIR/ambiguous-builtin-attrs.rs:7:5
--> $DIR/ambiguous-builtin-attrs.rs:6:5
|
LL | use builtin_attrs::*;
| ^^^^^^^^^^^^^^^^
@ -40,7 +40,7 @@ LL | fn non_macro_expanded_location<#[repr(C)] T>() {
|
= note: `repr` could refer to a built-in attribute
note: `repr` could also refer to the attribute macro imported here
--> $DIR/ambiguous-builtin-attrs.rs:7:5
--> $DIR/ambiguous-builtin-attrs.rs:6:5
|
LL | use builtin_attrs::*;
| ^^^^^^^^^^^^^^^^
@ -54,7 +54,7 @@ LL | #[repr(C)]
|
= note: `repr` could refer to a built-in attribute
note: `repr` could also refer to the attribute macro imported here
--> $DIR/ambiguous-builtin-attrs.rs:7:5
--> $DIR/ambiguous-builtin-attrs.rs:6:5
|
LL | use builtin_attrs::*;
| ^^^^^^^^^^^^^^^^
@ -82,7 +82,7 @@ LL | #![feature(decl_macro)]
|
= note: `feature` could refer to a built-in attribute
note: `feature` could also refer to the attribute macro imported here
--> $DIR/ambiguous-builtin-attrs.rs:7:5
--> $DIR/ambiguous-builtin-attrs.rs:6:5
|
LL | use builtin_attrs::*;
| ^^^^^^^^^^^^^^^^


@ -29,10 +29,7 @@ error[E0121]: the type placeholder `_` is not allowed within types on item signa
--> $DIR/typeck_type_placeholder_item_help.rs:13:22
|
LL | const TEST4: fn() -> _ = 42;
| ^
| |
| not allowed in type signatures
| help: use type parameters instead: `T`
| ^ not allowed in type signatures
error[E0121]: the type placeholder `_` is not allowed within types on item signatures
--> $DIR/typeck_type_placeholder_item_help.rs:17:18


@ -71,8 +71,8 @@ proc-macro2 = { version = "1", features = ["default"] }
quote = { version = "1", features = ["default"] }
serde = { version = "1.0.82", features = ['derive'] }
serde_json = { version = "1.0.31", features = ["raw_value", "unbounded_depth"] }
smallvec-0_6 = { package = "smallvec", version = "0.6", features = ['union', 'may_dangle'] }
smallvec = { version = "1.0", features = ['union', 'may_dangle'] }
smallvec-0_6 = { package = "smallvec", version = "0.6.14", features = ['union', 'may_dangle'] }
smallvec = { version = "1.6.1", features = ['union', 'may_dangle'] }
syn = { version = "1", features = ['fold', 'full', 'extra-traits', 'visit', 'visit-mut'] }
url = { version = "2.0", features = ['serde'] }