Auto merge of #96495 - Dylan-DPC:rollup-9lm4tpp, r=Dylan-DPC

Rollup of 7 pull requests

Successful merges:

 - #96377 (make `fn() -> _ { .. }` suggestion MachineApplicable)
 - #96397 (Make EncodeWide implement FusedIterator)
 - #96421 (Less `NoDelim`)
 - #96432 (not need `Option` for `dbg_scope`)
 - #96466 (Better error messages when collecting into `[T; n]`)
 - #96471 (replace let else with `?`)
 - #96483 (Add missing `target_feature` to the list of well known cfg names)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2022-04-28 01:37:03 +00:00
commit 81799cd8fd
34 changed files with 243 additions and 209 deletions

@ -1542,10 +1542,10 @@ pub enum MacArgs {
}
impl MacArgs {
pub fn delim(&self) -> DelimToken {
pub fn delim(&self) -> Option<DelimToken> {
match self {
MacArgs::Delimited(_, delim, _) => delim.to_token(),
MacArgs::Empty | MacArgs::Eq(..) => token::NoDelim,
MacArgs::Delimited(_, delim, _) => Some(delim.to_token()),
MacArgs::Empty | MacArgs::Eq(..) => None,
}
}
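
For context, a minimal standalone sketch of how a caller adapts to `delim()` now returning `Option<DelimToken>`; the `DelimToken` and `MacArgs` definitions below are simplified stand-ins for the compiler's types, not the real rustc API.

```rust
// Simplified stand-ins for the real `token::DelimToken` and `ast::MacArgs`.
#[derive(Clone, Copy, PartialEq, Debug)]
#[allow(dead_code)]
enum DelimToken { Paren, Bracket, Brace }

#[allow(dead_code)]
enum MacArgs {
    Empty,
    Delimited(DelimToken),
    Eq,
}

impl MacArgs {
    // After this change, "no delimiter" is expressed as `None` instead of a
    // dedicated `NoDelim` variant.
    fn delim(&self) -> Option<DelimToken> {
        match self {
            MacArgs::Delimited(delim) => Some(*delim),
            MacArgs::Empty | MacArgs::Eq => None,
        }
    }
}

fn main() {
    let args = MacArgs::Delimited(DelimToken::Brace);
    // Callers now match on the `Option` rather than comparing against `NoDelim`.
    match args.delim() {
        Some(DelimToken::Brace) => println!("brace-delimited macro args"),
        Some(_) => println!("paren- or bracket-delimited"),
        None => println!("no delimiters (empty or `= expr` form)"),
    }
    assert_eq!(MacArgs::Empty.delim(), None);
}
```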

@ -464,7 +464,7 @@ fn print_attr_item(&mut self, item: &ast::AttrItem, span: Span) {
Some(MacHeader::Path(&item.path)),
false,
None,
delim.to_token(),
Some(delim.to_token()),
tokens,
true,
span,
@ -530,7 +530,7 @@ fn print_tt(&mut self, tt: &TokenTree, convert_dollar_crate: bool) {
None,
false,
None,
*delim,
Some(*delim),
tts,
convert_dollar_crate,
dspan.entire(),
@ -556,12 +556,12 @@ fn print_mac_common(
header: Option<MacHeader<'_>>,
has_bang: bool,
ident: Option<Ident>,
delim: DelimToken,
delim: Option<DelimToken>,
tts: &TokenStream,
convert_dollar_crate: bool,
span: Span,
) {
if delim == DelimToken::Brace {
if delim == Some(DelimToken::Brace) {
self.cbox(INDENT_UNIT);
}
match header {
@ -577,7 +577,7 @@ fn print_mac_common(
self.print_ident(ident);
}
match delim {
DelimToken::Brace => {
Some(DelimToken::Brace) => {
if header.is_some() || has_bang || ident.is_some() {
self.nbsp();
}
@ -585,23 +585,25 @@ fn print_mac_common(
if !tts.is_empty() {
self.space();
}
}
_ => {
let token_str = self.token_kind_to_string(&token::OpenDelim(delim));
self.word(token_str)
}
}
self.ibox(0);
self.print_tts(tts, convert_dollar_crate);
self.end();
match delim {
DelimToken::Brace => {
self.ibox(0);
self.print_tts(tts, convert_dollar_crate);
self.end();
let empty = tts.is_empty();
self.bclose(span, empty);
}
_ => {
Some(delim) => {
let token_str = self.token_kind_to_string(&token::OpenDelim(delim));
self.word(token_str);
self.ibox(0);
self.print_tts(tts, convert_dollar_crate);
self.end();
let token_str = self.token_kind_to_string(&token::CloseDelim(delim));
self.word(token_str)
self.word(token_str);
}
None => {
self.ibox(0);
self.print_tts(tts, convert_dollar_crate);
self.end();
}
}
}

@ -20,7 +20,6 @@ pub fn compute_mir_scopes<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
instance: Instance<'tcx>,
mir: &Body<'tcx>,
fn_dbg_scope: &'ll DIScope,
debug_context: &mut FunctionDebugContext<&'ll DIScope, &'ll DILocation>,
) {
// Find all scopes with variables defined in them.
@ -38,40 +37,41 @@ pub fn compute_mir_scopes<'ll, 'tcx>(
// Nothing to emit, of course.
None
};
let mut instantiated = BitSet::new_empty(mir.source_scopes.len());
// Instantiate all scopes.
for idx in 0..mir.source_scopes.len() {
let scope = SourceScope::new(idx);
make_mir_scope(cx, instance, mir, fn_dbg_scope, &variables, debug_context, scope);
make_mir_scope(cx, instance, mir, &variables, debug_context, &mut instantiated, scope);
}
assert!(instantiated.count() == mir.source_scopes.len());
}
fn make_mir_scope<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
instance: Instance<'tcx>,
mir: &Body<'tcx>,
fn_dbg_scope: &'ll DIScope,
variables: &Option<BitSet<SourceScope>>,
debug_context: &mut FunctionDebugContext<&'ll DIScope, &'ll DILocation>,
instantiated: &mut BitSet<SourceScope>,
scope: SourceScope,
) {
if debug_context.scopes[scope].dbg_scope.is_some() {
if instantiated.contains(scope) {
return;
}
let scope_data = &mir.source_scopes[scope];
let parent_scope = if let Some(parent) = scope_data.parent_scope {
make_mir_scope(cx, instance, mir, fn_dbg_scope, variables, debug_context, parent);
make_mir_scope(cx, instance, mir, variables, debug_context, instantiated, parent);
debug_context.scopes[parent]
} else {
// The root is the function itself.
let loc = cx.lookup_debug_loc(mir.span.lo());
debug_context.scopes[scope] = DebugScope {
dbg_scope: Some(fn_dbg_scope),
inlined_at: None,
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_pos,
..debug_context.scopes[scope]
};
instantiated.insert(scope);
return;
};
@ -79,6 +79,7 @@ fn make_mir_scope<'ll, 'tcx>(
// Do not create a DIScope if there are no variables defined in this
// MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat.
debug_context.scopes[scope] = parent_scope;
instantiated.insert(scope);
return;
}
@ -100,7 +101,7 @@ fn make_mir_scope<'ll, 'tcx>(
None => unsafe {
llvm::LLVMRustDIBuilderCreateLexicalBlock(
DIB(cx),
parent_scope.dbg_scope.unwrap(),
parent_scope.dbg_scope,
file_metadata,
loc.line,
loc.col,
@ -116,9 +117,10 @@ fn make_mir_scope<'ll, 'tcx>(
});
debug_context.scopes[scope] = DebugScope {
dbg_scope: Some(dbg_scope),
dbg_scope,
inlined_at: inlined_at.or(parent_scope.inlined_at),
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_pos,
};
instantiated.insert(scope);
}
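
The `instantiated` bit set replaces the `Option` check on `dbg_scope`: scopes are built parents-first and each one is recorded exactly once. A rough self-contained sketch of that memoization pattern, using a plain `HashSet` and invented `Scope`/`build_scope` names rather than the rustc types:

```rust
use std::collections::HashSet;

// Illustrative stand-in: each scope optionally has a parent, like MIR source scopes.
struct Scope { parent: Option<usize> }

fn build_scope(idx: usize, scopes: &[Scope], instantiated: &mut HashSet<usize>) {
    // Mirrors the early return on `instantiated.contains(scope)`.
    if instantiated.contains(&idx) {
        return;
    }
    if let Some(parent) = scopes[idx].parent {
        // Parents are instantiated before their children, exactly once.
        build_scope(parent, scopes, instantiated);
    }
    instantiated.insert(idx);
}

fn main() {
    let scopes = vec![
        Scope { parent: None },
        Scope { parent: Some(0) },
        Scope { parent: Some(0) },
    ];
    let mut instantiated = HashSet::new();
    for idx in 0..scopes.len() {
        build_scope(idx, &scopes, &mut instantiated);
    }
    // As in `compute_mir_scopes`, every scope must end up instantiated.
    assert_eq!(instantiated.len(), scopes.len());
}
```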

@ -286,9 +286,8 @@ fn create_function_debug_context(
}
// Initialize fn debug context (including scopes).
// FIXME(eddyb) figure out a way to not need `Option` for `dbg_scope`.
let empty_scope = DebugScope {
dbg_scope: None,
dbg_scope: self.dbg_scope_fn(instance, fn_abi, Some(llfn)),
inlined_at: None,
file_start_pos: BytePos(0),
file_end_pos: BytePos(0),
@ -297,13 +296,7 @@ fn create_function_debug_context(
FunctionDebugContext { scopes: IndexVec::from_elem(empty_scope, &mir.source_scopes) };
// Fill in all the scopes, with the information from the MIR body.
compute_mir_scopes(
self,
instance,
mir,
self.dbg_scope_fn(instance, fn_abi, Some(llfn)),
&mut fn_debug_context,
);
compute_mir_scopes(self, instance, mir, &mut fn_debug_context);
Some(fn_debug_context)
}

@ -39,8 +39,7 @@ pub struct PerLocalVarDebugInfo<'tcx, D> {
#[derive(Clone, Copy, Debug)]
pub struct DebugScope<S, L> {
// FIXME(eddyb) this should never be `None`, after initialization.
pub dbg_scope: Option<S>,
pub dbg_scope: S,
/// Call site location, if this scope was inlined from another function.
pub inlined_at: Option<L>,
@ -61,17 +60,12 @@ pub fn adjust_dbg_scope_for_span<Cx: CodegenMethods<'tcx, DIScope = S, DILocatio
cx: &Cx,
span: Span,
) -> S {
// FIXME(eddyb) this should never be `None`.
let dbg_scope = self
.dbg_scope
.unwrap_or_else(|| bug!("`dbg_scope` is only `None` during initialization"));
let pos = span.lo();
if pos < self.file_start_pos || pos >= self.file_end_pos {
let sm = cx.sess().source_map();
cx.extend_scope_to_file(dbg_scope, &sm.lookup_char_pos(pos).file)
cx.extend_scope_to_file(self.dbg_scope, &sm.lookup_char_pos(pos).file)
} else {
dbg_scope
self.dbg_scope
}
}
}

@ -1272,9 +1272,7 @@ pub fn parse_macro_name_and_helper_attrs(
// Once we've located the `#[proc_macro_derive]` attribute, verify
// that it's of the form `#[proc_macro_derive(Foo)]` or
// `#[proc_macro_derive(Foo, attributes(A, ..))]`
let Some(list) = attr.meta_item_list() else {
return None;
};
let list = attr.meta_item_list()?;
if list.len() != 1 && list.len() != 2 {
diag.span_err(attr.span, "attribute must have either one or two arguments");
return None;
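
The two forms in this hunk behave identically; a small standalone illustration of the `let ... else { return None }` to `?` rewrite applied throughout this rollup:

```rust
fn first_word_len_let_else(s: &str) -> Option<usize> {
    // The pattern being replaced:
    let Some(word) = s.split_whitespace().next() else {
        return None;
    };
    Some(word.len())
}

fn first_word_len_question(s: &str) -> Option<usize> {
    // The shorter form: `?` returns `None` early if the value is `None`.
    let word = s.split_whitespace().next()?;
    Some(word.len())
}

fn main() {
    assert_eq!(first_word_len_let_else("hello world"), Some(5));
    assert_eq!(first_word_len_question("hello world"), Some(5));
    assert_eq!(first_word_len_question(""), None);
}
```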

@ -260,16 +260,15 @@ fn generic_extension<'cx, 'tt>(
// Merge the gated spans from parsing the matcher with the pre-existing ones.
sess.gated_spans.merge(gated_spans_snapshot);
// Ignore the delimiters on the RHS.
let rhs = match &rhses[i] {
mbe::TokenTree::Delimited(_, delimited) => &delimited.tts,
let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] {
mbe::TokenTree::Delimited(span, delimited) => (&delimited, *span),
_ => cx.span_bug(sp, "malformed macro rhs"),
};
let arm_span = rhses[i].span();
let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
let rhs_spans = rhs.tts.iter().map(|t| t.span()).collect::<Vec<_>>();
// rhs has holes ( `$id` and `$(...)` that need filled)
let mut tts = match transcribe(cx, &named_matches, &rhs, transparency) {
let mut tts = match transcribe(cx, &named_matches, &rhs, rhs_span, transparency) {
Ok(tts) => tts,
Err(mut err) => {
err.emit();

@ -29,8 +29,8 @@ fn visit_span(&mut self, span: &mut Span) {
enum Frame<'a> {
Delimited {
tts: &'a [mbe::TokenTree],
delim_token: token::DelimToken,
idx: usize,
delim_token: token::DelimToken,
span: DelimSpan,
},
Sequence {
@ -42,8 +42,8 @@ enum Frame<'a> {
impl<'a> Frame<'a> {
/// Construct a new frame around the delimited set of tokens.
fn new(tts: &'a [mbe::TokenTree]) -> Frame<'a> {
Frame::Delimited { tts, delim_token: token::NoDelim, idx: 0, span: DelimSpan::dummy() }
fn new(src: &'a mbe::Delimited, span: DelimSpan) -> Frame<'a> {
Frame::Delimited { tts: &src.tts, idx: 0, delim_token: src.delim, span }
}
}
@ -85,17 +85,18 @@ fn next(&mut self) -> Option<&'a mbe::TokenTree> {
pub(super) fn transcribe<'a>(
cx: &ExtCtxt<'a>,
interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
src: &[mbe::TokenTree],
src: &mbe::Delimited,
src_span: DelimSpan,
transparency: Transparency,
) -> PResult<'a, TokenStream> {
// Nothing for us to transcribe...
if src.is_empty() {
if src.tts.is_empty() {
return Ok(TokenStream::default());
}
// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
// we have yet to expand/are still expanding. We start the stack off with the whole RHS.
let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new(&src)];
let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new(&src, src_span)];
// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
// `repeats` keeps track of where we are in matching at each level, with the last element being

@ -1714,7 +1714,7 @@ pub fn iter<'a>(&'a self, row: R) -> impl Iterator<Item = C> + 'a {
}
pub fn row(&self, row: R) -> Option<&HybridBitSet<C>> {
if let Some(Some(row)) = self.rows.get(row) { Some(row) } else { None }
self.rows.get(row)?.as_ref()
}
/// Intersects `row` with `set`. `set` can be either `BitSet` or

@ -25,21 +25,16 @@ pub(crate) fn find_anon_type<'tcx>(
region: Region<'tcx>,
br: &ty::BoundRegionKind,
) -> Option<(&'tcx hir::Ty<'tcx>, &'tcx hir::FnSig<'tcx>)> {
if let Some(anon_reg) = tcx.is_suitable_region(region) {
let hir_id = tcx.hir().local_def_id_to_hir_id(anon_reg.def_id);
let Some(fn_sig) = tcx.hir().get(hir_id).fn_sig() else {
return None
};
let anon_reg = tcx.is_suitable_region(region)?;
let hir_id = tcx.hir().local_def_id_to_hir_id(anon_reg.def_id);
let fn_sig = tcx.hir().get(hir_id).fn_sig()?;
fn_sig
.decl
.inputs
.iter()
.find_map(|arg| find_component_for_bound_region(tcx, arg, br))
.map(|ty| (ty, fn_sig))
} else {
None
}
fn_sig
.decl
.inputs
.iter()
.find_map(|arg| find_component_for_bound_region(tcx, arg, br))
.map(|ty| (ty, fn_sig))
}
// This method creates a FindNestedTypeVisitor which returns the type corresponding

@ -3,8 +3,8 @@
use crate::ty::subst::{GenericArg, GenericArgKind};
use crate::ty::TyKind::*;
use crate::ty::{
ConstKind, ExistentialPredicate, ExistentialProjection, ExistentialTraitRef, InferTy,
ProjectionTy, Term, Ty, TyCtxt, TypeAndMut,
ConstKind, DefIdTree, ExistentialPredicate, ExistentialProjection, ExistentialTraitRef,
InferTy, ProjectionTy, Term, Ty, TyCtxt, TypeAndMut,
};
use rustc_data_structures::fx::FxHashMap;
@ -74,10 +74,10 @@ pub fn is_simple_text(self) -> bool {
}
/// Whether the type can be safely suggested during error recovery.
pub fn is_suggestable(self) -> bool {
fn generic_arg_is_suggestible(arg: GenericArg<'_>) -> bool {
pub fn is_suggestable(self, tcx: TyCtxt<'tcx>) -> bool {
fn generic_arg_is_suggestible<'tcx>(arg: GenericArg<'tcx>, tcx: TyCtxt<'tcx>) -> bool {
match arg.unpack() {
GenericArgKind::Type(ty) => ty.is_suggestable(),
GenericArgKind::Type(ty) => ty.is_suggestable(tcx),
GenericArgKind::Const(c) => const_is_suggestable(c.val()),
_ => true,
}
@ -99,8 +99,7 @@ fn const_is_suggestable(kind: ConstKind<'_>) -> bool {
// temporary, so I'll leave this as a fixme.
match self.kind() {
Opaque(..)
| FnDef(..)
FnDef(..)
| Closure(..)
| Infer(..)
| Generator(..)
@ -108,27 +107,38 @@ fn const_is_suggestable(kind: ConstKind<'_>) -> bool {
| Bound(_, _)
| Placeholder(_)
| Error(_) => false,
Opaque(did, substs) => {
let parent = tcx.parent(*did).expect("opaque types always have a parent");
if let hir::def::DefKind::TyAlias | hir::def::DefKind::AssocTy = tcx.def_kind(parent)
&& let Opaque(parent_did, _) = tcx.type_of(parent).kind()
&& parent_did == did
{
substs.iter().all(|a| generic_arg_is_suggestible(a, tcx))
} else {
false
}
}
Dynamic(dty, _) => dty.iter().all(|pred| match pred.skip_binder() {
ExistentialPredicate::Trait(ExistentialTraitRef { substs, .. }) => {
substs.iter().all(generic_arg_is_suggestible)
substs.iter().all(|a| generic_arg_is_suggestible(a, tcx))
}
ExistentialPredicate::Projection(ExistentialProjection {
substs, term, ..
}) => {
let term_is_suggestable = match term {
Term::Ty(ty) => ty.is_suggestable(),
Term::Ty(ty) => ty.is_suggestable(tcx),
Term::Const(c) => const_is_suggestable(c.val()),
};
term_is_suggestable && substs.iter().all(generic_arg_is_suggestible)
term_is_suggestable && substs.iter().all(|a| generic_arg_is_suggestible(a, tcx))
}
_ => true,
}),
Projection(ProjectionTy { substs: args, .. }) | Adt(_, args) => {
args.iter().all(generic_arg_is_suggestible)
args.iter().all(|a| generic_arg_is_suggestible(a, tcx))
}
Tuple(args) => args.iter().all(|ty| ty.is_suggestable()),
Slice(ty) | RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => ty.is_suggestable(),
Array(ty, c) => ty.is_suggestable() && const_is_suggestable(c.val()),
Tuple(args) => args.iter().all(|ty| ty.is_suggestable(tcx)),
Slice(ty) | RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => ty.is_suggestable(tcx),
Array(ty, c) => ty.is_suggestable(tcx) && const_is_suggestable(c.val()),
_ => true,
}
}
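
A simplified, self-contained sketch of the shape of this API change: a recursive predicate that previously took no context now threads a context argument through every recursive call so it can make per-node decisions (in rustc, looking up an opaque type's parent via `tcx`). The types below are illustrative stand-ins only.

```rust
// Illustrative stand-ins; not the real `Ty`/`TyCtxt`.
struct Ctxt { hidden_names: Vec<&'static str> }

enum Ty {
    Named(&'static str),
    Tuple(Vec<Ty>),
}

impl Ty {
    // Like `is_suggestable`, the predicate now takes the context it needs and
    // passes it along on every recursive call.
    fn is_suggestable(&self, cx: &Ctxt) -> bool {
        match self {
            Ty::Named(name) => !cx.hidden_names.contains(name),
            Ty::Tuple(tys) => tys.iter().all(|ty| ty.is_suggestable(cx)),
        }
    }
}

fn main() {
    let cx = Ctxt { hidden_names: vec!["Secret"] };
    let ty = Ty::Tuple(vec![Ty::Named("i32"), Ty::Named("Secret")]);
    assert!(!ty.is_suggestable(&cx));
    assert!(Ty::Named("i32").is_suggestable(&cx));
}
```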

@ -336,9 +336,7 @@ fn evaluate_candidate<'tcx>(
Some(poss)
}
};
let Some((_, child)) = targets.iter().next() else {
return None
};
let (_, child) = targets.iter().next()?;
let child_terminator = &bbs[child].terminator();
let TerminatorKind::SwitchInt {
switch_ty: child_ty,

@ -338,9 +338,7 @@ pub(super) fn check_for_substitution<'a>(
ch: char,
err: &mut Diagnostic,
) -> Option<token::TokenKind> {
let Some(&(_u_char, u_name, ascii_char)) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch) else {
return None;
};
let &(_u_char, u_name, ascii_char) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch)?;
let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8()));

@ -5,7 +5,7 @@
use rustc_ast::{self as ast};
use rustc_ast::{AstLike, AttrVec, Attribute};
use rustc_errors::PResult;
use rustc_span::{sym, Span, DUMMY_SP};
use rustc_span::{sym, Span};
use std::convert::TryInto;
use std::ops::Range;
@ -400,24 +400,26 @@ fn make_token_stream(
) -> AttrAnnotatedTokenStream {
#[derive(Debug)]
struct FrameData {
open: Span,
open_delim: DelimToken,
// This is `None` for the first frame, `Some` for all others.
open_delim_sp: Option<(DelimToken, Span)>,
inner: Vec<(AttrAnnotatedTokenTree, Spacing)>,
}
let mut stack =
vec![FrameData { open: DUMMY_SP, open_delim: DelimToken::NoDelim, inner: vec![] }];
let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
let mut token_and_spacing = iter.next();
while let Some((token, spacing)) = token_and_spacing {
match token {
FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => {
stack.push(FrameData { open: span, open_delim: delim, inner: vec![] });
stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
}
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
// HACK: If we encounter a mismatched `None` delimiter at the top
// level, just ignore it.
if matches!(delim, DelimToken::NoDelim)
&& (stack.len() == 1
|| !matches!(stack.last_mut().unwrap().open_delim, DelimToken::NoDelim))
|| !matches!(
stack.last_mut().unwrap().open_delim_sp.unwrap().0,
DelimToken::NoDelim
))
{
token_and_spacing = iter.next();
continue;
@ -430,7 +432,7 @@ struct FrameData {
// merge our current frame with the one above it. That is, transform
// `[ { < first second } third ]` into `[ { first second } third ]`
if !matches!(delim, DelimToken::NoDelim)
&& matches!(frame_data.open_delim, DelimToken::NoDelim)
&& matches!(frame_data.open_delim_sp.unwrap().0, DelimToken::NoDelim)
{
stack.last_mut().unwrap().inner.extend(frame_data.inner);
// Process our closing delimiter again, this time at the previous
@ -439,12 +441,13 @@ struct FrameData {
continue;
}
let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
assert_eq!(
frame_data.open_delim, delim,
open_delim, delim,
"Mismatched open/close delims: open={:?} close={:?}",
frame_data.open, span
open_delim, span
);
let dspan = DelimSpan::from_pair(frame_data.open, span);
let dspan = DelimSpan::from_pair(open_sp, span);
let stream = AttrAnnotatedTokenStream::new(frame_data.inner);
let delimited = AttrAnnotatedTokenTree::Delimited(dspan, delim, stream);
stack
@ -472,7 +475,7 @@ struct FrameData {
// HACK: If we don't have a closing `None` delimiter for our last
// frame, merge the frame with the top-level frame. That is,
// turn `< first second` into `first second`
if stack.len() == 2 && stack[1].open_delim == DelimToken::NoDelim {
if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == DelimToken::NoDelim {
let temp_buf = stack.pop().unwrap();
stack.last_mut().unwrap().inner.extend(temp_buf.inner);
}

@ -2043,7 +2043,8 @@ fn parse_closure_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
self.sess.gated_spans.gate(sym::async_closure, span);
}
if self.token.kind == TokenKind::Semi && self.token_cursor.frame.delim == DelimToken::Paren
if self.token.kind == TokenKind::Semi
&& matches!(self.token_cursor.frame.delim_sp, Some((DelimToken::Paren, _)))
{
// It is likely that the closure body is a block but where the
// braces have been removed. We will recover and eat the next

@ -244,14 +244,13 @@ struct TokenCursor {
#[derive(Clone)]
struct TokenCursorFrame {
delim: token::DelimToken,
span: DelimSpan,
delim_sp: Option<(DelimToken, DelimSpan)>,
tree_cursor: tokenstream::Cursor,
}
impl TokenCursorFrame {
fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self {
TokenCursorFrame { delim, span, tree_cursor: tts.into_trees() }
fn new(delim_sp: Option<(DelimToken, DelimSpan)>, tts: TokenStream) -> Self {
TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() }
}
}
@ -266,7 +265,7 @@ fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
loop {
// FIXME: we currently don't return `NoDelim` open/close delims. To fix #67062 we will
// need to, whereupon the `delim != DelimToken::NoDelim` conditions below can be
// removed, as well as the loop.
// removed.
if let Some((tree, spacing)) = self.frame.tree_cursor.next_with_spacing_ref() {
match tree {
&TokenTree::Token(ref token) => match (desugar_doc_comments, token) {
@ -277,7 +276,7 @@ fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
},
&TokenTree::Delimited(sp, delim, ref tts) => {
// Set `open_delim` to true here because we deal with it immediately.
let frame = TokenCursorFrame::new(sp, delim, tts.clone());
let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone());
self.stack.push(mem::replace(&mut self.frame, frame));
if delim != DelimToken::NoDelim {
return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
@ -286,12 +285,11 @@ fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
}
};
} else if let Some(frame) = self.stack.pop() {
let delim = self.frame.delim;
let span = self.frame.span;
self.frame = frame;
if delim != DelimToken::NoDelim {
if let Some((delim, span)) = self.frame.delim_sp && delim != DelimToken::NoDelim {
self.frame = frame;
return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
}
self.frame = frame;
// No close delimiter to return; continue on to the next iteration.
} else {
return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
@ -330,8 +328,7 @@ fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> (Token
self.stack.push(mem::replace(
&mut self.frame,
TokenCursorFrame::new(
delim_span,
token::NoDelim,
None,
if attr_style == AttrStyle::Inner {
[TokenTree::token(token::Pound, span), TokenTree::token(token::Not, span), body]
.iter()
@ -431,10 +428,6 @@ pub fn new(
desugar_doc_comments: bool,
subparser_name: Option<&'static str>,
) -> Self {
// Note: because of the way `TokenCursor::inlined_next` is structured, the `span` and
// `delim` arguments here are never used.
let start_frame = TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens);
let mut parser = Parser {
sess,
token: Token::dummy(),
@ -444,7 +437,7 @@ pub fn new(
restrictions: Restrictions::empty(),
expected_tokens: Vec::new(),
token_cursor: TokenCursor {
frame: start_frame,
frame: TokenCursorFrame::new(None, tokens),
stack: Vec::new(),
num_next_calls: 0,
desugar_doc_comments,
@ -1025,7 +1018,7 @@ pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R
}
let frame = &self.token_cursor.frame;
if frame.delim != DelimToken::NoDelim {
if let Some((delim, span)) = frame.delim_sp && delim != DelimToken::NoDelim {
let all_normal = (0..dist).all(|i| {
let token = frame.tree_cursor.look_ahead(i);
!matches!(token, Some(TokenTree::Delimited(_, DelimToken::NoDelim, _)))
@ -1038,7 +1031,7 @@ pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R
looker(&Token::new(token::OpenDelim(*delim), dspan.open))
}
},
None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)),
None => looker(&Token::new(token::CloseDelim(delim), span.close)),
};
}
}
@ -1198,8 +1191,7 @@ pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
// Grab the tokens from this frame.
let frame = &self.token_cursor.frame;
let stream = frame.tree_cursor.stream.clone();
let span = frame.span;
let delim = frame.delim;
let (delim, span) = frame.delim_sp.unwrap();
// Advance the token cursor through the entire delimited
// sequence. After getting the `OpenDelim` we are *within* the

@ -164,25 +164,29 @@ fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResu
let delim = args.delim();
let hi = self.prev_token.span;
let style =
if delim == token::Brace { MacStmtStyle::Braces } else { MacStmtStyle::NoBraces };
let style = match delim {
Some(token::Brace) => MacStmtStyle::Braces,
Some(_) => MacStmtStyle::NoBraces,
None => unreachable!(),
};
let mac = MacCall { path, args, prior_type_ascription: self.last_type_ascription };
let kind =
if (delim == token::Brace && self.token != token::Dot && self.token != token::Question)
|| self.token == token::Semi
|| self.token == token::Eof
{
StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None }))
} else {
// Since none of the above applied, this is an expression statement macro.
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new());
let e = self.maybe_recover_from_bad_qpath(e, true)?;
let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
StmtKind::Expr(e)
};
let kind = if (style == MacStmtStyle::Braces
&& self.token != token::Dot
&& self.token != token::Question)
|| self.token == token::Semi
|| self.token == token::Eof
{
StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None }))
} else {
// Since none of the above applied, this is an expression statement macro.
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new());
let e = self.maybe_recover_from_bad_qpath(e, true)?;
let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
StmtKind::Expr(e)
};
Ok(self.mk_stmt(lo.to(hi), kind))
}

@ -1183,9 +1183,7 @@ fn smart_resolve_context_dependent_help(
ident: Symbol,
kind: &AssocItemKind,
) -> Option<Symbol> {
let Some((module, _)) = &self.current_trait_ref else {
return None;
};
let (module, _) = self.current_trait_ref.as_ref()?;
if ident == kw::Underscore {
// We do nothing for `_`.
return None;

@ -1043,6 +1043,7 @@ fn fill_well_known_names(&mut self) {
sym::target_has_atomic_load_store,
sym::target_has_atomic,
sym::target_has_atomic_equal_alignment,
sym::target_feature,
sym::panic,
sym::sanitize,
sym::debug_assertions,
@ -1086,6 +1087,10 @@ fn fill_well_known_values(&mut self) {
.into_iter()
.map(|sanitizer| Symbol::intern(sanitizer.as_str().unwrap()));
// Unknown possible values:
// - `feature`
// - `target_feature`
// No-values
for name in [
sym::doc,
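
`target_feature` is a compiler-provided cfg name, so conditional compilation like the sketch below should no longer be flagged as an unknown cfg *name* once it is on the well-known list (its *values* remain unchecked, per the comment added above). The check-cfg machinery was unstable at the time, so no specific command-line invocation is shown.

```rust
// Ordinary, standard `target_feature` cfg usage; only the name is well known,
// the feature strings themselves are target-dependent.
#[cfg(target_feature = "sse2")]
fn simd_capable() -> bool { true }

#[cfg(not(target_feature = "sse2"))]
fn simd_capable() -> bool { false }

fn main() {
    println!("compiled with sse2: {}", simd_capable());
}
```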

@ -217,22 +217,42 @@ fn on_unimplemented_note(
flags.push((sym::_Self, Some(shortname.to_owned())));
}
// Slices give us `[]`, `[{ty}]`
if let ty::Slice(aty) = self_ty.kind() {
flags.push((sym::_Self, Some("[]".to_string())));
if let Some(def) = aty.ty_adt_def() {
// We also want to be able to select the slice's type's original
// signature with no type arguments resolved
let type_string = self.tcx.type_of(def.did()).to_string();
flags.push((sym::_Self, Some(format!("[{type_string}]"))));
}
if aty.is_integral() {
flags.push((sym::_Self, Some("[{integral}]".to_string())));
}
}
// Arrays give us `[]`, `[{ty}; _]` and `[{ty}; N]`
if let ty::Array(aty, len) = self_ty.kind() {
flags.push((sym::_Self, Some("[]".to_owned())));
flags.push((sym::_Self, Some(format!("[{}]", aty))));
flags.push((sym::_Self, Some("[]".to_string())));
let len = len.val().try_to_value().and_then(|v| v.try_to_machine_usize(self.tcx));
flags.push((sym::_Self, Some(format!("[{}; _]", aty))));
if let Some(n) = len {
flags.push((sym::_Self, Some(format!("[{}; {}]", aty, n))));
}
if let Some(def) = aty.ty_adt_def() {
// We also want to be able to select the array's type's original
// signature with no type arguments resolved
let type_string = self.tcx.type_of(def.did()).to_string();
flags.push((sym::_Self, Some(format!("[{}]", type_string))));
let len =
len.val().try_to_value().and_then(|v| v.try_to_machine_usize(self.tcx));
let string = match len {
Some(n) => format!("[{}; {}]", type_string, n),
None => format!("[{}; _]", type_string),
};
flags.push((sym::_Self, Some(string)));
flags.push((sym::_Self, Some(format!("[{type_string}; _]"))));
if let Some(n) = len {
flags.push((sym::_Self, Some(format!("[{type_string}; {n}]"))));
}
}
if aty.is_integral() {
flags.push((sym::_Self, Some("[{integral}; _]".to_string())));
if let Some(n) = len {
flags.push((sym::_Self, Some(format!("[{{integral}}; {n}]"))));
}
}
}
if let ty::Dynamic(traits, _) = self_ty.kind() {

@ -86,7 +86,7 @@ fn generic_arg_mismatch_err(
let param_type = tcx.infer_ctxt().enter(|infcx| {
infcx.resolve_numeric_literals_with_default(tcx.type_of(param.def_id))
});
if param_type.is_suggestable() {
if param_type.is_suggestable(tcx) {
err.span_suggestion(
tcx.def_span(src_def_id),
"consider changing this type parameter to be a `const` generic",

@ -2466,7 +2466,7 @@ fn ast_ty_to_ty_inner(&self, ast_ty: &hir::Ty<'_>, borrowed: bool, in_path: bool
span,
ty,
opt_sugg: Some((span, Applicability::MachineApplicable))
.filter(|_| ty.is_suggestable()),
.filter(|_| ty.is_suggestable(tcx)),
});
ty

@ -757,7 +757,7 @@ pub(in super::super) fn expected_inputs_for_expected_output(
formal_args: &[Ty<'tcx>],
) -> Option<Vec<Ty<'tcx>>> {
let formal_ret = self.resolve_vars_with_obligations(formal_ret);
let Some(ret_ty) = expected_ret.only_has_type(self) else { return None };
let ret_ty = expected_ret.only_has_type(self)?;
// HACK(oli-obk): This is a hack to keep RPIT and TAIT in sync wrt their behaviour.
// Without it, the inference

@ -525,7 +525,7 @@ pub(in super::super) fn suggest_missing_return_type(
self.resolve_numeric_literals_with_default(self.resolve_vars_if_possible(found));
// Only suggest changing the return type for methods that
// haven't set a return type at all (and aren't `fn main()` or an impl).
match (&fn_decl.output, found.is_suggestable(), can_suggest, expected.is_unit()) {
match (&fn_decl.output, found.is_suggestable(self.tcx), can_suggest, expected.is_unit()) {
(&hir::FnRetTy::DefaultReturn(span), true, true, true) => {
err.span_suggestion(
span,

@ -41,7 +41,7 @@
use rustc_middle::ty::util::Discr;
use rustc_middle::ty::util::IntTypeExt;
use rustc_middle::ty::{self, AdtKind, Const, DefIdTree, Ty, TyCtxt};
use rustc_middle::ty::{ReprOptions, ToPredicate, TypeFoldable};
use rustc_middle::ty::{ReprOptions, ToPredicate};
use rustc_session::lint;
use rustc_session::parse::feature_err;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
@ -2004,28 +2004,29 @@ fn infer_return_ty_for_fn_sig<'tcx>(
visitor.visit_ty(ty);
let mut diag = bad_placeholder(tcx, visitor.0, "return type");
let ret_ty = fn_sig.skip_binder().output();
if !ret_ty.references_error() {
if !ret_ty.is_closure() {
let ret_ty_str = match ret_ty.kind() {
// Suggest a function pointer return type instead of a unique function definition
// (e.g. `fn() -> i32` instead of `fn() -> i32 { f }`, the latter of which is invalid
// syntax)
ty::FnDef(..) => ret_ty.fn_sig(tcx).to_string(),
_ => ret_ty.to_string(),
};
if ret_ty.is_suggestable(tcx) {
diag.span_suggestion(
ty.span,
"replace with the correct return type",
ret_ty.to_string(),
Applicability::MachineApplicable,
);
} else if matches!(ret_ty.kind(), ty::FnDef(..)) {
let fn_sig = ret_ty.fn_sig(tcx);
if fn_sig.skip_binder().inputs_and_output.iter().all(|t| t.is_suggestable(tcx)) {
diag.span_suggestion(
ty.span,
"replace with the correct return type",
ret_ty_str,
Applicability::MaybeIncorrect,
fn_sig.to_string(),
Applicability::MachineApplicable,
);
} else {
// We're dealing with a closure, so we should suggest using `impl Fn` or trait bounds
// to prevent the user from getting a papercut while trying to use the unique closure
// syntax (e.g. `[closure@src/lib.rs:2:5: 2:9]`).
diag.help("consider using an `Fn`, `FnMut`, or `FnOnce` trait bound");
diag.note("for more information on `Fn` traits and closure types, see https://doc.rust-lang.org/book/ch13-01-closures.html");
}
} else if ret_ty.is_closure() {
// We're dealing with a closure, so we should suggest using `impl Fn` or trait bounds
// to prevent the user from getting a papercut while trying to use the unique closure
// syntax (e.g. `[closure@src/lib.rs:2:5: 2:9]`).
diag.help("consider using an `Fn`, `FnMut`, or `FnOnce` trait bound");
diag.note("for more information on `Fn` traits and closure types, see https://doc.rust-lang.org/book/ch13-01-closures.html");
}
diag.emit();
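
The user-visible effect, sketched as an ordinary example rather than the compiler's own test: when a return type is written as `_`, the "replace with the correct return type" suggestion is now `MachineApplicable` whenever the inferred type (or, for `FnDef` types, the corresponding `fn` signature) is suggestable, so tools can apply it automatically.

```rust
// Writing `-> _` is rejected:
//
//     fn add_one(x: i32) -> _ { x + 1 }
//     // error[E0121]: the placeholder `_` is not allowed within types on item signatures
//     //   help: replace with the correct return type: `i32`
//
// With this change that suggestion is MachineApplicable, so applying it
// mechanically yields:
fn add_one(x: i32) -> i32 {
    x + 1
}

fn main() {
    assert_eq!(add_one(41), 42);
}
```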

@ -96,30 +96,24 @@
#[rustc_on_unimplemented(
on(
_Self = "[{A}]",
message = "a value of type `{Self}` cannot be built since `{Self}` has no definite size",
message = "a slice of type `{Self}` cannot be built since `{Self}` has no definite size",
label = "try explicitly collecting into a `Vec<{A}>`",
),
on(
all(
A = "{integer}",
any(
_Self = "[i8]",
_Self = "[i16]",
_Self = "[i32]",
_Self = "[i64]",
_Self = "[i128]",
_Self = "[isize]",
_Self = "[u8]",
_Self = "[u16]",
_Self = "[u32]",
_Self = "[u64]",
_Self = "[u128]",
_Self = "[usize]"
)
),
message = "a value of type `{Self}` cannot be built since `{Self}` has no definite size",
all(A = "{integer}", any(_Self = "[{integral}]",)),
message = "a slice of type `{Self}` cannot be built since `{Self}` has no definite size",
label = "try explicitly collecting into a `Vec<{A}>`",
),
on(
_Self = "[{A}; _]",
message = "an array of type `{Self}` cannot be built directly from an iterator",
label = "try collecting into a `Vec<{A}>`, then using `.try_into()`",
),
on(
all(A = "{integer}", any(_Self = "[{integral}; _]",)),
message = "an array of type `{Self}` cannot be built directly from an iterator",
label = "try collecting into a `Vec<{A}>`, then using `.try_into()`",
),
message = "a value of type `{Self}` cannot be built from an iterator \
over elements of type `{A}`",
label = "value of type `{Self}` cannot be built from `std::iter::Iterator<Item={A}>`"

@ -25,7 +25,7 @@
use crate::collections::TryReserveError;
use crate::fmt;
use crate::hash::{Hash, Hasher};
use crate::iter::FromIterator;
use crate::iter::{FromIterator, FusedIterator};
use crate::mem;
use crate::ops;
use crate::rc::Rc;
@ -899,6 +899,9 @@ fn size_hint(&self) -> (usize, Option<usize>) {
}
}
#[stable(feature = "encode_wide_fused_iterator", since = "1.62.0")]
impl FusedIterator for EncodeWide<'_> {}
impl Hash for CodePoint {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
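
A small usage sketch of what the new `FusedIterator` impl guarantees: once `EncodeWide` yields `None` it keeps yielding `None`. It is guarded with `cfg(windows)` because `encode_wide` comes from the Windows-only `OsStrExt` extension trait.

```rust
#[cfg(windows)]
fn main() {
    use std::ffi::OsStr;
    use std::os::windows::ffi::OsStrExt;

    let mut units = OsStr::new("ok").encode_wide();
    assert_eq!(units.next(), Some(u16::from(b'o')));
    assert_eq!(units.next(), Some(u16::from(b'k')));
    // `EncodeWide: FusedIterator` means exhaustion is sticky: every further
    // call keeps returning `None`, so callers (and adapters like `zip`) don't
    // need to `fuse()` it defensively.
    assert_eq!(units.next(), None);
    assert_eq!(units.next(), None);
}

#[cfg(not(windows))]
fn main() {
    // `encode_wide` is only available on Windows targets.
}
```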

@ -1305,7 +1305,7 @@ fn clean_qpath(hir_ty: &hir::Ty<'_>, cx: &mut DocContext<'_>) -> Type {
fn maybe_expand_private_type_alias(cx: &mut DocContext<'_>, path: &hir::Path<'_>) -> Option<Type> {
let Res::Def(DefKind::TyAlias, def_id) = path.res else { return None };
// Substitute private type aliases
let Some(def_id) = def_id.as_local() else { return None };
let def_id = def_id.as_local()?;
let alias = if !cx.cache.access_levels.is_exported(def_id.to_def_id()) {
&cx.tcx.hir().expect_item(def_id).kind
} else {

@ -0,0 +1,7 @@
fn main() {
//~^ NOTE required by a bound in this
let whatever: [u32; 10] = (0..10).collect();
//~^ ERROR an array of type `[u32; 10]` cannot be built directly from an iterator
//~| NOTE try collecting into a `Vec<{integer}>`, then using `.try_into()`
//~| NOTE required by a bound in `collect`
}

@ -0,0 +1,16 @@
error[E0277]: an array of type `[u32; 10]` cannot be built directly from an iterator
--> $DIR/collect-into-array.rs:3:39
|
LL | let whatever: [u32; 10] = (0..10).collect();
| ^^^^^^^ try collecting into a `Vec<{integer}>`, then using `.try_into()`
|
= help: the trait `FromIterator<{integer}>` is not implemented for `[u32; 10]`
note: required by a bound in `collect`
--> $SRC_DIR/core/src/iter/traits/iterator.rs:LL:COL
|
LL | fn collect<B: FromIterator<Self::Item>>(self) -> B
| ^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `collect`
error: aborting due to previous error
For more information about this error, try `rustc --explain E0277`.
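
Following the new label's advice, a fix for the failing line in the test above could look like the following; the `expect` message is this example's choice, not part of the diagnostic.

```rust
fn main() {
    // Collect into a `Vec` first, then convert to a fixed-size array.
    let whatever: [u32; 10] = (0..10)
        .collect::<Vec<u32>>()
        .try_into()
        .expect("exactly 10 elements");
    assert_eq!(whatever[9], 9);
}
```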

@ -6,7 +6,7 @@ fn process_slice(data: &[i32]) {
fn main() {
let some_generated_vec = (0..10).collect();
//~^ ERROR the size for values of type `[i32]` cannot be known at compilation time
//~| ERROR a value of type `[i32]` cannot be built since `[i32]` has no definite size
//~| ERROR a slice of type `[i32]` cannot be built since `[i32]` has no definite size
//~| NOTE try explicitly collecting into a `Vec<{integer}>`
//~| NOTE required by a bound in `collect`
//~| NOTE all local variables must have a statically known size

@ -8,7 +8,7 @@ LL | let some_generated_vec = (0..10).collect();
= note: all local variables must have a statically known size
= help: unsized locals are gated as an unstable feature
error[E0277]: a value of type `[i32]` cannot be built since `[i32]` has no definite size
error[E0277]: a slice of type `[i32]` cannot be built since `[i32]` has no definite size
--> $DIR/collect-into-slice.rs:7:38
|
LL | let some_generated_vec = (0..10).collect();

@ -1325,7 +1325,7 @@ pub(crate) fn can_be_overflowed_expr(
}
ast::ExprKind::MacCall(ref mac) => {
match (
rustc_ast::ast::MacDelimiter::from_token(mac.args.delim()),
rustc_ast::ast::MacDelimiter::from_token(mac.args.delim().unwrap()),
context.config.overflow_delimited_expr(),
) {
(Some(ast::MacDelimiter::Bracket), true)

@ -562,7 +562,7 @@ fn delim_token_to_str(
("{ ", " }")
}
}
DelimToken::NoDelim => ("", ""),
DelimToken::NoDelim => unreachable!(),
};
if use_multiple_lines {
let indent_str = shape.indent.to_string_with_newline(context.config);