Auto merge of #120375 - matthiaskrgr:rollup-ueakvms, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - #117420 (Make `#![allow_internal_unstable(..)]` work with `stmt_expr_attributes`)
 - #117678 (Stabilize `slice_group_by`)
 - #119917 (Remove special-case handling of `vec.split_off(0)`)
 - #120117 (Update `std::io::Error::downcast` return type)
 - #120329 (RFC 3349 precursors)
 - #120339 (privacy: Refactor top-level visiting in `NamePrivacyVisitor`)
 - #120345 (Clippy subtree update)
 - #120360 (Don't fire `OPAQUE_HIDDEN_INFERRED_BOUND` on sized return of AFIT)
 - #120372 (Fix outdated comment on Box)

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed 2024-01-26 14:58:10 +00:00 (commit e7bbe8ce93)
190 changed files with 3195 additions and 1374 deletions


@ -596,11 +596,11 @@ dependencies = [
name = "clippy_dev"
version = "0.0.1"
dependencies = [
"aho-corasick 0.7.20",
"aho-corasick 1.0.2",
"clap",
"indoc",
"itertools",
"opener 0.5.2",
"opener",
"shell-escape",
"walkdir",
]
@ -610,7 +610,7 @@ name = "clippy_lints"
version = "0.1.77"
dependencies = [
"arrayvec",
"cargo_metadata 0.15.4",
"cargo_metadata 0.18.0",
"clippy_config",
"clippy_utils",
"declare_clippy_lint",
@ -2351,7 +2351,7 @@ dependencies = [
"log",
"memchr",
"once_cell",
"opener 0.6.1",
"opener",
"pathdiff",
"pulldown-cmark",
"regex",
@ -2626,16 +2626,6 @@ version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "opener"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "293c15678e37254c15bd2f092314abb4e51d7fdde05c2021279c12631b54f005"
dependencies = [
"bstr",
"winapi",
]
[[package]]
name = "opener"
version = "0.6.1"


@ -3,8 +3,7 @@
use crate::ast::{self, LitKind, MetaItemLit, StrStyle};
use crate::token::{self, Token};
use rustc_lexer::unescape::{
byte_from_char, unescape_byte, unescape_c_string, unescape_char, unescape_literal, CStrUnit,
Mode,
byte_from_char, unescape_byte, unescape_char, unescape_mixed, unescape_unicode, MixedUnit, Mode,
};
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
@ -48,6 +47,9 @@ pub fn from_token_lit(lit: token::Lit) -> Result<LitKind, LitError> {
return Err(LitError::InvalidSuffix);
}
// For byte/char/string literals, chars and escapes have already been
// checked in the lexer (in `cook_lexer_literal`). So we can assume all
// chars and escapes are valid here.
Ok(match kind {
token::Bool => {
assert!(symbol.is_bool_lit());
@ -56,12 +58,12 @@ pub fn from_token_lit(lit: token::Lit) -> Result<LitKind, LitError> {
token::Byte => {
return unescape_byte(symbol.as_str())
.map(LitKind::Byte)
.map_err(|_| LitError::LexerError);
.map_err(|_| panic!("failed to unescape byte literal"));
}
token::Char => {
return unescape_char(symbol.as_str())
.map(LitKind::Char)
.map_err(|_| LitError::LexerError);
.map_err(|_| panic!("failed to unescape char literal"));
}
// There are some valid suffixes for integer and float literals,
@ -77,26 +79,22 @@ pub fn from_token_lit(lit: token::Lit) -> Result<LitKind, LitError> {
let s = symbol.as_str();
// Vanilla strings are so common we optimize for the common case where no chars
// requiring special behaviour are present.
let symbol = if s.contains(['\\', '\r']) {
let symbol = if s.contains('\\') {
let mut buf = String::with_capacity(s.len());
let mut error = Ok(());
// Force-inlining here is aggressive but the closure is
// called on every char in the string, so it can be
// hot in programs with many long strings.
unescape_literal(
// called on every char in the string, so it can be hot in
// programs with many long strings containing escapes.
unescape_unicode(
s,
Mode::Str,
&mut #[inline(always)]
|_, unescaped_char| match unescaped_char {
|_, c| match c {
Ok(c) => buf.push(c),
Err(err) => {
if err.is_fatal() {
error = Err(LitError::LexerError);
}
assert!(!err.is_fatal(), "failed to unescape string literal")
}
},
);
error?;
Symbol::intern(&buf)
} else {
symbol
@ -104,86 +102,46 @@ pub fn from_token_lit(lit: token::Lit) -> Result<LitKind, LitError> {
LitKind::Str(symbol, ast::StrStyle::Cooked)
}
token::StrRaw(n) => {
// Raw strings have no escapes, so we only need to check for invalid chars, and we
// can reuse the symbol on success.
let mut error = Ok(());
unescape_literal(symbol.as_str(), Mode::RawStr, &mut |_, unescaped_char| {
match unescaped_char {
Ok(_) => {}
Err(err) => {
if err.is_fatal() {
error = Err(LitError::LexerError);
}
}
}
});
error?;
// Raw strings have no escapes so no work is needed here.
LitKind::Str(symbol, ast::StrStyle::Raw(n))
}
token::ByteStr => {
let s = symbol.as_str();
let mut buf = Vec::with_capacity(s.len());
let mut error = Ok(());
unescape_literal(s, Mode::ByteStr, &mut |_, c| match c {
unescape_unicode(s, Mode::ByteStr, &mut |_, c| match c {
Ok(c) => buf.push(byte_from_char(c)),
Err(err) => {
if err.is_fatal() {
error = Err(LitError::LexerError);
}
assert!(!err.is_fatal(), "failed to unescape string literal")
}
});
error?;
LitKind::ByteStr(buf.into(), StrStyle::Cooked)
}
token::ByteStrRaw(n) => {
// Raw strings have no escapes, so we only need to check for invalid chars, and we
// can convert the symbol directly to a `Lrc<u8>` on success.
let s = symbol.as_str();
let mut error = Ok(());
unescape_literal(s, Mode::RawByteStr, &mut |_, c| match c {
Ok(_) => {}
Err(err) => {
if err.is_fatal() {
error = Err(LitError::LexerError);
}
}
});
LitKind::ByteStr(s.to_owned().into_bytes().into(), StrStyle::Raw(n))
// Raw strings have no escapes so we can convert the symbol
// directly to a `Lrc<u8>`.
let buf = symbol.as_str().to_owned().into_bytes();
LitKind::ByteStr(buf.into(), StrStyle::Raw(n))
}
token::CStr => {
let s = symbol.as_str();
let mut buf = Vec::with_capacity(s.len());
let mut error = Ok(());
unescape_c_string(s, Mode::CStr, &mut |_span, c| match c {
Ok(CStrUnit::Byte(b)) => buf.push(b),
Ok(CStrUnit::Char(c)) => {
unescape_mixed(s, Mode::CStr, &mut |_span, c| match c {
Ok(MixedUnit::Char(c)) => {
buf.extend_from_slice(c.encode_utf8(&mut [0; 4]).as_bytes())
}
Ok(MixedUnit::HighByte(b)) => buf.push(b),
Err(err) => {
if err.is_fatal() {
error = Err(LitError::LexerError);
}
assert!(!err.is_fatal(), "failed to unescape C string literal")
}
});
error?;
buf.push(0);
LitKind::CStr(buf.into(), StrStyle::Cooked)
}
token::CStrRaw(n) => {
// Raw strings have no escapes, so we only need to check for invalid chars, and we
// can convert the symbol directly to a `Lrc<u8>` on success.
let s = symbol.as_str();
let mut error = Ok(());
unescape_c_string(s, Mode::RawCStr, &mut |_, c| match c {
Ok(_) => {}
Err(err) => {
if err.is_fatal() {
error = Err(LitError::LexerError);
}
}
});
error?;
let mut buf = s.to_owned().into_bytes();
// Raw strings have no escapes so we can convert the symbol
// directly to a `Lrc<u8>` after appending the terminating NUL
// char.
let mut buf = symbol.as_str().to_owned().into_bytes();
buf.push(0);
LitKind::CStr(buf.into(), StrStyle::Raw(n))
}


@ -21,7 +21,7 @@ index 897a5e9..331f66f 100644
-#![cfg_attr(target_has_atomic = "128", feature(integer_atomics))]
#![cfg_attr(test, feature(cfg_match))]
#![feature(int_roundings)]
#![feature(slice_group_by)]
#![feature(split_array)]
diff --git a/atomic.rs b/atomic.rs
index b735957..ea728b6 100644
--- a/atomic.rs


@ -373,7 +373,9 @@ pub(crate) fn cfg_true(&self, attr: &Attribute) -> (bool, Option<MetaItem>) {
/// If attributes are not allowed on expressions, emit an error for `attr`
#[instrument(level = "trace", skip(self))]
pub(crate) fn maybe_emit_expr_attr_err(&self, attr: &Attribute) {
if self.features.is_some_and(|features| !features.stmt_expr_attributes) {
if self.features.is_some_and(|features| !features.stmt_expr_attributes)
&& !attr.span.allows_unstable(sym::stmt_expr_attributes)
{
let mut err = feature_err(
&self.sess,
sym::stmt_expr_attributes,


@ -80,12 +80,12 @@ pub fn is_fatal(&self) -> bool {
}
}
/// Takes a contents of a literal (without quotes) and produces a sequence of
/// escaped characters or errors.
/// Takes the contents of a unicode-only (non-mixed-utf8) literal (without
/// quotes) and produces a sequence of escaped characters or errors.
///
/// Values are returned by invoking `callback`. For `Char` and `Byte` modes,
/// the callback will be called exactly once.
pub fn unescape_literal<F>(src: &str, mode: Mode, callback: &mut F)
pub fn unescape_unicode<F>(src: &str, mode: Mode, callback: &mut F)
where
F: FnMut(Range<usize>, Result<char, EscapeError>),
{
@ -97,50 +97,63 @@ pub fn unescape_literal<F>(src: &str, mode: Mode, callback: &mut F)
}
Str | ByteStr => unescape_non_raw_common(src, mode, callback),
RawStr | RawByteStr => check_raw_common(src, mode, callback),
CStr | RawCStr => unreachable!(),
RawCStr => check_raw_common(src, mode, &mut |r, mut result| {
if let Ok('\0') = result {
result = Err(EscapeError::NulInCStr);
}
callback(r, result)
}),
CStr => unreachable!(),
}
}
/// A unit within CStr. Must not be a nul character.
pub enum CStrUnit {
Byte(u8),
/// Used for mixed utf8 string literals, i.e. those that allow both unicode
/// chars and high bytes.
pub enum MixedUnit {
/// Used for ASCII chars (written directly or via `\x00`..`\x7f` escapes)
/// and Unicode chars (written directly or via `\u` escapes).
///
/// For example, if '¥' appears in a string it is represented here as
/// `MixedUnit::Char('¥')`, and it will be appended to the relevant byte
/// string as the two-byte UTF-8 sequence `[0xc2, 0xa5]`
Char(char),
/// Used for high bytes (`\x80`..`\xff`).
///
/// For example, if `\xa5` appears in a string it is represented here as
/// `MixedUnit::HighByte(0xa5)`, and it will be appended to the relevant
/// byte string as the single byte `0xa5`.
HighByte(u8),
}
impl From<u8> for CStrUnit {
fn from(value: u8) -> Self {
CStrUnit::Byte(value)
impl From<char> for MixedUnit {
fn from(c: char) -> Self {
MixedUnit::Char(c)
}
}
impl From<char> for CStrUnit {
fn from(value: char) -> Self {
CStrUnit::Char(value)
impl From<u8> for MixedUnit {
fn from(n: u8) -> Self {
if n.is_ascii() { MixedUnit::Char(n as char) } else { MixedUnit::HighByte(n) }
}
}
pub fn unescape_c_string<F>(src: &str, mode: Mode, callback: &mut F)
/// Takes the contents of a mixed-utf8 literal (without quotes) and produces
/// a sequence of escaped characters or errors.
///
/// Values are returned by invoking `callback`.
pub fn unescape_mixed<F>(src: &str, mode: Mode, callback: &mut F)
where
F: FnMut(Range<usize>, Result<CStrUnit, EscapeError>),
F: FnMut(Range<usize>, Result<MixedUnit, EscapeError>),
{
match mode {
CStr => {
unescape_non_raw_common(src, mode, &mut |r, mut result| {
if let Ok(CStrUnit::Byte(0) | CStrUnit::Char('\0')) = result {
result = Err(EscapeError::NulInCStr);
}
callback(r, result)
});
}
RawCStr => {
check_raw_common(src, mode, &mut |r, mut result| {
if let Ok('\0') = result {
result = Err(EscapeError::NulInCStr);
}
callback(r, result.map(CStrUnit::Char))
});
}
Char | Byte | Str | RawStr | ByteStr | RawByteStr => unreachable!(),
CStr => unescape_non_raw_common(src, mode, &mut |r, mut result| {
if let Ok(MixedUnit::Char('\0')) = result {
result = Err(EscapeError::NulInCStr);
}
callback(r, result)
}),
Char | Byte | Str | RawStr | ByteStr | RawByteStr | RawCStr => unreachable!(),
}
}
@ -181,29 +194,29 @@ pub fn in_double_quotes(self) -> bool {
}
}
/// Non-byte literals should have `\xXX` escapes that are within the ASCII range.
fn ascii_escapes_should_be_ascii(self) -> bool {
/// Are `\x80`..`\xff` allowed?
fn allow_high_bytes(self) -> bool {
match self {
Char | Str => true,
Byte | ByteStr | CStr => false,
Char | Str => false,
Byte | ByteStr | CStr => true,
RawStr | RawByteStr | RawCStr => unreachable!(),
}
}
/// Whether characters within the literal must be within the ASCII range.
/// Are unicode (non-ASCII) chars allowed?
#[inline]
fn chars_should_be_ascii(self) -> bool {
fn allow_unicode_chars(self) -> bool {
match self {
Byte | ByteStr | RawByteStr => true,
Char | Str | RawStr | CStr | RawCStr => false,
Byte | ByteStr | RawByteStr => false,
Char | Str | RawStr | CStr | RawCStr => true,
}
}
/// Byte literals do not allow unicode escape.
fn is_unicode_escape_disallowed(self) -> bool {
/// Are unicode escapes (`\u`) allowed?
fn allow_unicode_escapes(self) -> bool {
match self {
Byte | ByteStr => true,
Char | Str | CStr => false,
Byte | ByteStr => false,
Char | Str | CStr => true,
RawByteStr | RawStr | RawCStr => unreachable!(),
}
}
@ -217,20 +230,19 @@ pub fn prefix_noraw(self) -> &'static str {
}
}
fn scan_escape<T: From<u8> + From<char>>(
fn scan_escape<T: From<char> + From<u8>>(
chars: &mut Chars<'_>,
mode: Mode,
) -> Result<T, EscapeError> {
// Previous character was '\\', unescape what follows.
let res = match chars.next().ok_or(EscapeError::LoneSlash)? {
'"' => b'"',
'n' => b'\n',
'r' => b'\r',
't' => b'\t',
'\\' => b'\\',
'\'' => b'\'',
'0' => b'\0',
let res: char = match chars.next().ok_or(EscapeError::LoneSlash)? {
'"' => '"',
'n' => '\n',
'r' => '\r',
't' => '\t',
'\\' => '\\',
'\'' => '\'',
'0' => '\0',
'x' => {
// Parse hexadecimal character code.
@ -240,25 +252,23 @@ fn scan_escape<T: From<u8> + From<char>>(
let lo = chars.next().ok_or(EscapeError::TooShortHexEscape)?;
let lo = lo.to_digit(16).ok_or(EscapeError::InvalidCharInHexEscape)?;
let value = hi * 16 + lo;
let value = (hi * 16 + lo) as u8;
if mode.ascii_escapes_should_be_ascii() && !is_ascii(value) {
return Err(EscapeError::OutOfRangeHexEscape);
}
value as u8
return if !mode.allow_high_bytes() && !value.is_ascii() {
Err(EscapeError::OutOfRangeHexEscape)
} else {
// This may be a high byte, but that will only happen if `T` is
// `MixedUnit`, because of the `allow_high_bytes` check above.
Ok(T::from(value as u8))
};
}
'u' => return scan_unicode(chars, mode.is_unicode_escape_disallowed()).map(Into::into),
'u' => return scan_unicode(chars, mode.allow_unicode_escapes()).map(T::from),
_ => return Err(EscapeError::InvalidEscape),
};
Ok(res.into())
Ok(T::from(res))
}
fn scan_unicode(
chars: &mut Chars<'_>,
is_unicode_escape_disallowed: bool,
) -> Result<char, EscapeError> {
fn scan_unicode(chars: &mut Chars<'_>, allow_unicode_escapes: bool) -> Result<char, EscapeError> {
// We've parsed '\u', now we have to parse '{..}'.
if chars.next() != Some('{') {
@ -286,7 +296,7 @@ fn scan_unicode(
// Incorrect syntax has higher priority for error reporting
// than unallowed value for a literal.
if is_unicode_escape_disallowed {
if !allow_unicode_escapes {
return Err(EscapeError::UnicodeEscapeInByte);
}
@ -312,12 +322,8 @@ fn scan_unicode(
}
#[inline]
fn ascii_check(c: char, chars_should_be_ascii: bool) -> Result<char, EscapeError> {
if chars_should_be_ascii && !c.is_ascii() {
Err(EscapeError::NonAsciiCharInByte)
} else {
Ok(c)
}
fn ascii_check(c: char, allow_unicode_chars: bool) -> Result<char, EscapeError> {
if allow_unicode_chars || c.is_ascii() { Ok(c) } else { Err(EscapeError::NonAsciiCharInByte) }
}
fn unescape_char_or_byte(chars: &mut Chars<'_>, mode: Mode) -> Result<char, EscapeError> {
@ -326,7 +332,7 @@ fn unescape_char_or_byte(chars: &mut Chars<'_>, mode: Mode) -> Result<char, Esca
'\\' => scan_escape(chars, mode),
'\n' | '\t' | '\'' => Err(EscapeError::EscapeOnlyChar),
'\r' => Err(EscapeError::BareCarriageReturn),
_ => ascii_check(c, mode.chars_should_be_ascii()),
_ => ascii_check(c, mode.allow_unicode_chars()),
}?;
if chars.next().is_some() {
return Err(EscapeError::MoreThanOneChar);
@ -336,12 +342,12 @@ fn unescape_char_or_byte(chars: &mut Chars<'_>, mode: Mode) -> Result<char, Esca
/// Takes a contents of a string literal (without quotes) and produces a
/// sequence of escaped characters or errors.
fn unescape_non_raw_common<F, T: From<u8> + From<char>>(src: &str, mode: Mode, callback: &mut F)
fn unescape_non_raw_common<F, T: From<char> + From<u8>>(src: &str, mode: Mode, callback: &mut F)
where
F: FnMut(Range<usize>, Result<T, EscapeError>),
{
let mut chars = src.chars();
let chars_should_be_ascii = mode.chars_should_be_ascii(); // get this outside the loop
let allow_unicode_chars = mode.allow_unicode_chars(); // get this outside the loop
// The `start` and `end` computation here is complicated because
// `skip_ascii_whitespace` makes us to skip over chars without counting
@ -366,7 +372,7 @@ fn unescape_non_raw_common<F, T: From<u8> + From<char>>(src: &str, mode: Mode, c
}
'"' => Err(EscapeError::EscapeOnlyChar),
'\r' => Err(EscapeError::BareCarriageReturn),
_ => ascii_check(c, chars_should_be_ascii).map(Into::into),
_ => ascii_check(c, allow_unicode_chars).map(T::from),
};
let end = src.len() - chars.as_str().len();
callback(start..end, res);
@ -408,7 +414,7 @@ fn check_raw_common<F>(src: &str, mode: Mode, callback: &mut F)
F: FnMut(Range<usize>, Result<char, EscapeError>),
{
let mut chars = src.chars();
let chars_should_be_ascii = mode.chars_should_be_ascii(); // get this outside the loop
let allow_unicode_chars = mode.allow_unicode_chars(); // get this outside the loop
// The `start` and `end` computation here matches the one in
// `unescape_non_raw_common` for consistency, even though this function
@ -417,7 +423,7 @@ fn check_raw_common<F>(src: &str, mode: Mode, callback: &mut F)
let start = src.len() - chars.as_str().len() - c.len_utf8();
let res = match c {
'\r' => Err(EscapeError::BareCarriageReturnInRawString),
_ => ascii_check(c, chars_should_be_ascii),
_ => ascii_check(c, allow_unicode_chars),
};
let end = src.len() - chars.as_str().len();
callback(start..end, res);
@ -430,7 +436,3 @@ pub fn byte_from_char(c: char) -> u8 {
debug_assert!(res <= u8::MAX as u32, "guaranteed because of ByteStr");
res as u8
}
fn is_ascii(x: u32) -> bool {
x <= 0x7F
}
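The new split between `unescape_unicode` (char, byte, string and byte-string literals, raw or not) and `unescape_mixed` (C string literals, which may mix unicode chars with high bytes) can be exercised directly. A minimal sketch, assuming the in-tree `rustc_lexer` crate is available to the caller (e.g. via the `rustc-dev` component); the `cook_str` and `cook_c_str` helpers are illustrative, not part of this diff:

```rust
use rustc_lexer::unescape::{unescape_mixed, unescape_unicode, MixedUnit, Mode};

// Cook a unicode-only string literal body (the text between the quotes).
fn cook_str(src: &str) -> String {
    let mut out = String::new();
    unescape_unicode(src, Mode::Str, &mut |_range, res| {
        // In the compiler the lexer has already rejected invalid escapes,
        // which is why `from_token_lit` above can assert instead of erroring.
        out.push(res.expect("invalid escape"));
    });
    out
}

// Cook a C string literal body, which may mix unicode chars and high bytes.
fn cook_c_str(src: &str) -> Vec<u8> {
    let mut out = Vec::new();
    unescape_mixed(src, Mode::CStr, &mut |_range, res| {
        match res.expect("invalid escape") {
            MixedUnit::Char(c) => out.extend_from_slice(c.encode_utf8(&mut [0; 4]).as_bytes()),
            MixedUnit::HighByte(b) => out.push(b),
        }
    });
    out
}

fn main() {
    assert_eq!(cook_str(r"hi\n¥"), "hi\n¥");
    // `\xa5` is a lone high byte; `¥` is the two-byte UTF-8 sequence 0xc2 0xa5.
    assert_eq!(cook_c_str(r"¥\xa5"), [0xc2, 0xa5, 0xa5]);
}
```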


@ -100,7 +100,7 @@ fn check(literal_text: &str, expected_char: char) {
fn test_unescape_str_warn() {
fn check(literal: &str, expected: &[(Range<usize>, Result<char, EscapeError>)]) {
let mut unescaped = Vec::with_capacity(literal.len());
unescape_literal(literal, Mode::Str, &mut |range, res| unescaped.push((range, res)));
unescape_unicode(literal, Mode::Str, &mut |range, res| unescaped.push((range, res)));
assert_eq!(unescaped, expected);
}
@ -124,7 +124,7 @@ fn check(literal: &str, expected: &[(Range<usize>, Result<char, EscapeError>)])
fn test_unescape_str_good() {
fn check(literal_text: &str, expected: &str) {
let mut buf = Ok(String::with_capacity(literal_text.len()));
unescape_literal(literal_text, Mode::Str, &mut |range, c| {
unescape_unicode(literal_text, Mode::Str, &mut |range, c| {
if let Ok(b) = &mut buf {
match c {
Ok(c) => b.push(c),
@ -241,7 +241,7 @@ fn check(literal_text: &str, expected_byte: u8) {
fn test_unescape_byte_str_good() {
fn check(literal_text: &str, expected: &[u8]) {
let mut buf = Ok(Vec::with_capacity(literal_text.len()));
unescape_literal(literal_text, Mode::ByteStr, &mut |range, c| {
unescape_unicode(literal_text, Mode::ByteStr, &mut |range, c| {
if let Ok(b) = &mut buf {
match c {
Ok(c) => b.push(byte_from_char(c)),
@ -264,7 +264,7 @@ fn check(literal_text: &str, expected: &[u8]) {
fn test_unescape_raw_str() {
fn check(literal: &str, expected: &[(Range<usize>, Result<char, EscapeError>)]) {
let mut unescaped = Vec::with_capacity(literal.len());
unescape_literal(literal, Mode::RawStr, &mut |range, res| unescaped.push((range, res)));
unescape_unicode(literal, Mode::RawStr, &mut |range, res| unescaped.push((range, res)));
assert_eq!(unescaped, expected);
}
@ -276,7 +276,7 @@ fn check(literal: &str, expected: &[(Range<usize>, Result<char, EscapeError>)])
fn test_unescape_raw_byte_str() {
fn check(literal: &str, expected: &[(Range<usize>, Result<char, EscapeError>)]) {
let mut unescaped = Vec::with_capacity(literal.len());
unescape_literal(literal, Mode::RawByteStr, &mut |range, res| unescaped.push((range, res)));
unescape_unicode(literal, Mode::RawByteStr, &mut |range, res| unescaped.push((range, res)));
assert_eq!(unescaped, expected);
}


@ -4,7 +4,7 @@
use rustc_middle::ty::{
self, fold::BottomUpFolder, print::TraitPredPrintModifiersAndPath, Ty, TypeFoldable,
};
use rustc_span::Span;
use rustc_span::{symbol::kw, Span};
use rustc_trait_selection::traits;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
@ -96,6 +96,17 @@ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
continue;
}
// HACK: `async fn() -> Self` in traits is "ok"...
// This is not really that great, but it's similar to why the `-> Self`
// return type is well-formed in traits even when `Self` isn't sized.
if let ty::Param(param_ty) = *proj_term.kind()
&& param_ty.name == kw::SelfUpper
&& matches!(opaque.origin, hir::OpaqueTyOrigin::AsyncFn(_))
&& opaque.in_trait
{
continue;
}
let proj_ty =
Ty::new_projection(cx.tcx, proj.projection_ty.def_id, proj.projection_ty.args);
// For every instance of the projection type in the bounds,
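For reference, this is roughly the shape of code the carve-out targets (trait and type names are illustrative): an `async fn` in a trait whose future resolves to `Self`, which previously tripped `OPAQUE_HIDDEN_INFERRED_BOUND` because `Self` is not known to be `Sized` inside the trait.

```rust
#![allow(dead_code)] // nothing here is called; the point is the trait shape

trait Build {
    // Desugars to an opaque `impl Future<Output = Self>` return type; with
    // this change the lint no longer flags the `Output = Self` bound even
    // though `Self: Sized` is not assumed in the trait.
    async fn build() -> Self;
}

struct Widget;

impl Build for Widget {
    async fn build() -> Self {
        Widget
    }
}

fn main() {}
```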


@ -400,7 +400,7 @@ fn cook_lexer_literal(
.with_code(error_code!(E0762))
.emit()
}
self.cook_quoted(token::Char, Mode::Char, start, end, 1, 1) // ' '
self.cook_unicode(token::Char, Mode::Char, start, end, 1, 1) // ' '
}
rustc_lexer::LiteralKind::Byte { terminated } => {
if !terminated {
@ -412,7 +412,7 @@ fn cook_lexer_literal(
.with_code(error_code!(E0763))
.emit()
}
self.cook_quoted(token::Byte, Mode::Byte, start, end, 2, 1) // b' '
self.cook_unicode(token::Byte, Mode::Byte, start, end, 2, 1) // b' '
}
rustc_lexer::LiteralKind::Str { terminated } => {
if !terminated {
@ -424,7 +424,7 @@ fn cook_lexer_literal(
.with_code(error_code!(E0765))
.emit()
}
self.cook_quoted(token::Str, Mode::Str, start, end, 1, 1) // " "
self.cook_unicode(token::Str, Mode::Str, start, end, 1, 1) // " "
}
rustc_lexer::LiteralKind::ByteStr { terminated } => {
if !terminated {
@ -436,7 +436,7 @@ fn cook_lexer_literal(
.with_code(error_code!(E0766))
.emit()
}
self.cook_quoted(token::ByteStr, Mode::ByteStr, start, end, 2, 1) // b" "
self.cook_unicode(token::ByteStr, Mode::ByteStr, start, end, 2, 1) // b" "
}
rustc_lexer::LiteralKind::CStr { terminated } => {
if !terminated {
@ -448,13 +448,13 @@ fn cook_lexer_literal(
.with_code(error_code!(E0767))
.emit()
}
self.cook_c_string(token::CStr, Mode::CStr, start, end, 2, 1) // c" "
self.cook_mixed(token::CStr, Mode::CStr, start, end, 2, 1) // c" "
}
rustc_lexer::LiteralKind::RawStr { n_hashes } => {
if let Some(n_hashes) = n_hashes {
let n = u32::from(n_hashes);
let kind = token::StrRaw(n_hashes);
self.cook_quoted(kind, Mode::RawStr, start, end, 2 + n, 1 + n) // r##" "##
self.cook_unicode(kind, Mode::RawStr, start, end, 2 + n, 1 + n) // r##" "##
} else {
self.report_raw_str_error(start, 1);
}
@ -463,7 +463,7 @@ fn cook_lexer_literal(
if let Some(n_hashes) = n_hashes {
let n = u32::from(n_hashes);
let kind = token::ByteStrRaw(n_hashes);
self.cook_quoted(kind, Mode::RawByteStr, start, end, 3 + n, 1 + n) // br##" "##
self.cook_unicode(kind, Mode::RawByteStr, start, end, 3 + n, 1 + n) // br##" "##
} else {
self.report_raw_str_error(start, 2);
}
@ -472,7 +472,7 @@ fn cook_lexer_literal(
if let Some(n_hashes) = n_hashes {
let n = u32::from(n_hashes);
let kind = token::CStrRaw(n_hashes);
self.cook_c_string(kind, Mode::RawCStr, start, end, 3 + n, 1 + n) // cr##" "##
self.cook_unicode(kind, Mode::RawCStr, start, end, 3 + n, 1 + n) // cr##" "##
} else {
self.report_raw_str_error(start, 2);
}
@ -735,7 +735,7 @@ fn cook_common(
}
}
fn cook_quoted(
fn cook_unicode(
&self,
kind: token::LitKind,
mode: Mode,
@ -745,13 +745,13 @@ fn cook_quoted(
postfix_len: u32,
) -> (token::LitKind, Symbol) {
self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
unescape::unescape_literal(src, mode, &mut |span, result| {
unescape::unescape_unicode(src, mode, &mut |span, result| {
callback(span, result.map(drop))
})
})
}
fn cook_c_string(
fn cook_mixed(
&self,
kind: token::LitKind,
mode: Mode,
@ -761,7 +761,7 @@ fn cook_c_string(
postfix_len: u32,
) -> (token::LitKind, Symbol) {
self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
unescape::unescape_c_string(src, mode, &mut |span, result| {
unescape::unescape_mixed(src, mode, &mut |span, result| {
callback(span, result.map(drop))
})
})


@ -1056,7 +1056,7 @@ fn find_width_map_from_snippet(
fn unescape_string(string: &str) -> Option<string::String> {
let mut buf = string::String::new();
let mut ok = true;
unescape::unescape_literal(string, unescape::Mode::Str, &mut |_, unescaped_char| {
unescape::unescape_unicode(string, unescape::Mode::Str, &mut |_, unescaped_char| {
match unescaped_char {
Ok(c) => buf.push(c),
Err(_) => ok = false,


@ -917,7 +917,7 @@ fn warn_multiple(
return;
}
dead_codes.sort_by_key(|v| v.level);
for group in dead_codes[..].group_by(|a, b| a.level == b.level) {
for group in dead_codes[..].chunk_by(|a, b| a.level == b.level) {
self.lint_at_single_level(&group, participle, Some(def_id), report_on);
}
}


@ -11,7 +11,6 @@
#![feature(let_chains)]
#![feature(map_try_insert)]
#![feature(min_specialization)]
#![feature(slice_group_by)]
#![feature(try_blocks)]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]


@ -22,7 +22,6 @@
use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId, CRATE_DEF_ID};
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{AssocItemKind, ForeignItemKind, ItemId, PatKind};
use rustc_middle::hir::nested_filter;
use rustc_middle::middle::privacy::{EffectiveVisibilities, EffectiveVisibility, Level};
use rustc_middle::query::Providers;
use rustc_middle::ty::GenericArgs;
@ -34,9 +33,9 @@
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::Span;
use std::fmt;
use std::marker::PhantomData;
use std::ops::ControlFlow;
use std::{fmt, mem};
use errors::{
FieldIsPrivate, FieldIsPrivateLabel, FromPrivateDependencyInPublicInterface, InPublicInterface,
@ -933,7 +932,6 @@ fn visit_foreign_item(&mut self, item: &'tcx hir::ForeignItem<'tcx>) {
struct NamePrivacyVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>,
current_item: LocalDefId,
}
impl<'tcx> NamePrivacyVisitor<'tcx> {
@ -949,6 +947,7 @@ fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
// Checks that a field in a struct constructor (expression or pattern) is accessible.
fn check_field(
&mut self,
hir_id: hir::HirId, // ID of the field use
use_ctxt: Span, // syntax context of the field name at the use site
span: Span, // span of the field pattern, e.g., `x: 0`
def: ty::AdtDef<'tcx>, // definition of the struct or enum
@ -961,7 +960,6 @@ fn check_field(
// definition of the field
let ident = Ident::new(kw::Empty, use_ctxt);
let hir_id = self.tcx.local_def_id_to_hir_id(self.current_item);
let def_id = self.tcx.adjust_ident_and_get_scope(ident, def.did(), hir_id).1;
if !field.vis.is_accessible_from(def_id, self.tcx) {
self.tcx.dcx().emit_err(FieldIsPrivate {
@ -980,33 +978,13 @@ fn check_field(
}
impl<'tcx> Visitor<'tcx> for NamePrivacyVisitor<'tcx> {
type NestedFilter = nested_filter::All;
/// We want to visit items in the context of their containing
/// module and so forth, so supply a crate for doing a deep walk.
fn nested_visit_map(&mut self) -> Self::Map {
self.tcx.hir()
}
fn visit_mod(&mut self, _m: &'tcx hir::Mod<'tcx>, _s: Span, _n: hir::HirId) {
// Don't visit nested modules, since we run a separate visitor walk
// for each module in `effective_visibilities`
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
fn visit_nested_body(&mut self, body_id: hir::BodyId) {
let old_maybe_typeck_results =
self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
let body = self.tcx.hir().body(body);
self.visit_body(body);
self.maybe_typeck_results.replace(self.tcx.typeck_body(body_id));
self.visit_body(self.tcx.hir().body(body_id));
self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
let orig_current_item = mem::replace(&mut self.current_item, item.owner_id.def_id);
intravisit::walk_item(self, item);
self.current_item = orig_current_item;
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
if let hir::ExprKind::Struct(qpath, fields, ref base) = expr.kind {
let res = self.typeck_results().qpath_res(qpath, expr.hir_id);
@ -1020,17 +998,17 @@ fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
let field = fields
.iter()
.find(|f| self.typeck_results().field_index(f.hir_id) == vf_index);
let (use_ctxt, span) = match field {
Some(field) => (field.ident.span, field.span),
None => (base.span, base.span),
let (hir_id, use_ctxt, span) = match field {
Some(field) => (field.hir_id, field.ident.span, field.span),
None => (base.hir_id, base.span, base.span),
};
self.check_field(use_ctxt, span, adt, variant_field, true);
self.check_field(hir_id, use_ctxt, span, adt, variant_field, true);
}
} else {
for field in fields {
let use_ctxt = field.ident.span;
let (hir_id, use_ctxt, span) = (field.hir_id, field.ident.span, field.span);
let index = self.typeck_results().field_index(field.hir_id);
self.check_field(use_ctxt, field.span, adt, &variant.fields[index], false);
self.check_field(hir_id, use_ctxt, span, adt, &variant.fields[index], false);
}
}
}
@ -1044,9 +1022,9 @@ fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
let adt = self.typeck_results().pat_ty(pat).ty_adt_def().unwrap();
let variant = adt.variant_of_res(res);
for field in fields {
let use_ctxt = field.ident.span;
let (hir_id, use_ctxt, span) = (field.hir_id, field.ident.span, field.span);
let index = self.typeck_results().field_index(field.hir_id);
self.check_field(use_ctxt, field.span, adt, &variant.fields[index], false);
self.check_field(hir_id, use_ctxt, span, adt, &variant.fields[index], false);
}
}
@ -1741,17 +1719,12 @@ pub fn provide(providers: &mut Providers) {
fn check_mod_privacy(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
// Check privacy of names not checked in previous compilation stages.
let mut visitor = NamePrivacyVisitor {
tcx,
maybe_typeck_results: None,
current_item: module_def_id.to_local_def_id(),
};
let (module, span, hir_id) = tcx.hir().get_module(module_def_id);
intravisit::walk_mod(&mut visitor, module, hir_id);
let mut visitor = NamePrivacyVisitor { tcx, maybe_typeck_results: None };
tcx.hir().visit_item_likes_in_module(module_def_id, &mut visitor);
// Check privacy of explicitly written types and traits as well as
// inferred types of expressions and patterns.
let span = tcx.def_span(module_def_id);
let mut visitor = TypePrivacyVisitor { tcx, module_def_id, maybe_typeck_results: None, span };
tcx.hir().visit_item_likes_in_module(module_def_id, &mut visitor);
}


@ -191,8 +191,7 @@
#[fundamental]
#[stable(feature = "rust1", since = "1.0.0")]
// The declaration of the `Box` struct must be kept in sync with the
// `alloc::alloc::box_free` function or ICEs will happen. See the comment
// on `box_free` for more details.
// compiler or ICEs will happen.
pub struct Box<
T: ?Sized,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,


@ -149,7 +149,6 @@
#![feature(set_ptr_value)]
#![feature(sized_type_properties)]
#![feature(slice_from_ptr_range)]
#![feature(slice_group_by)]
#![feature(slice_ptr_get)]
#![feature(slice_ptr_len)]
#![feature(slice_range)]


@ -51,14 +51,14 @@
pub use core::slice::{from_mut_ptr_range, from_ptr_range};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{from_raw_parts, from_raw_parts_mut};
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
pub use core::slice::{ChunkBy, ChunkByMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{Chunks, Windows};
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub use core::slice::{ChunksExact, ChunksExactMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{ChunksMut, Split, SplitMut};
#[unstable(feature = "slice_group_by", issue = "80552")]
pub use core::slice::{GroupBy, GroupByMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{Iter, IterMut};
#[stable(feature = "rchunks", since = "1.31.0")]


@ -2204,14 +2204,6 @@ fn assert_failed(at: usize, len: usize) -> ! {
assert_failed(at, self.len());
}
if at == 0 {
// the new vector can take over the original buffer and avoid the copy
return mem::replace(
self,
Vec::with_capacity_in(self.capacity(), self.allocator().clone()),
);
}
let other_len = self.len - at;
let mut other = Vec::with_capacity_in(other_len, self.allocator().clone());


@ -29,7 +29,6 @@
#![feature(iter_advance_by)]
#![feature(iter_next_chunk)]
#![feature(round_char_boundary)]
#![feature(slice_group_by)]
#![feature(slice_partition_dedup)]
#![feature(string_remove_matches)]
#![feature(const_btree_len)]


@ -1614,10 +1614,10 @@ macro_rules! m {
}
#[test]
fn test_group_by() {
fn test_chunk_by() {
let slice = &[1, 1, 1, 3, 3, 2, 2, 2, 1, 0];
let mut iter = slice.group_by(|a, b| a == b);
let mut iter = slice.chunk_by(|a, b| a == b);
assert_eq!(iter.next(), Some(&[1, 1, 1][..]));
assert_eq!(iter.next(), Some(&[3, 3][..]));
assert_eq!(iter.next(), Some(&[2, 2, 2][..]));
@ -1625,7 +1625,7 @@ fn test_group_by() {
assert_eq!(iter.next(), Some(&[0][..]));
assert_eq!(iter.next(), None);
let mut iter = slice.group_by(|a, b| a == b);
let mut iter = slice.chunk_by(|a, b| a == b);
assert_eq!(iter.next_back(), Some(&[0][..]));
assert_eq!(iter.next_back(), Some(&[1][..]));
assert_eq!(iter.next_back(), Some(&[2, 2, 2][..]));
@ -1633,7 +1633,7 @@ fn test_group_by() {
assert_eq!(iter.next_back(), Some(&[1, 1, 1][..]));
assert_eq!(iter.next_back(), None);
let mut iter = slice.group_by(|a, b| a == b);
let mut iter = slice.chunk_by(|a, b| a == b);
assert_eq!(iter.next(), Some(&[1, 1, 1][..]));
assert_eq!(iter.next_back(), Some(&[0][..]));
assert_eq!(iter.next(), Some(&[3, 3][..]));
@ -1643,10 +1643,10 @@ fn test_group_by() {
}
#[test]
fn test_group_by_mut() {
fn test_chunk_by_mut() {
let slice = &mut [1, 1, 1, 3, 3, 2, 2, 2, 1, 0];
let mut iter = slice.group_by_mut(|a, b| a == b);
let mut iter = slice.chunk_by_mut(|a, b| a == b);
assert_eq!(iter.next(), Some(&mut [1, 1, 1][..]));
assert_eq!(iter.next(), Some(&mut [3, 3][..]));
assert_eq!(iter.next(), Some(&mut [2, 2, 2][..]));
@ -1654,7 +1654,7 @@ fn test_group_by_mut() {
assert_eq!(iter.next(), Some(&mut [0][..]));
assert_eq!(iter.next(), None);
let mut iter = slice.group_by_mut(|a, b| a == b);
let mut iter = slice.chunk_by_mut(|a, b| a == b);
assert_eq!(iter.next_back(), Some(&mut [0][..]));
assert_eq!(iter.next_back(), Some(&mut [1][..]));
assert_eq!(iter.next_back(), Some(&mut [2, 2, 2][..]));
@ -1662,7 +1662,7 @@ fn test_group_by_mut() {
assert_eq!(iter.next_back(), Some(&mut [1, 1, 1][..]));
assert_eq!(iter.next_back(), None);
let mut iter = slice.group_by_mut(|a, b| a == b);
let mut iter = slice.chunk_by_mut(|a, b| a == b);
assert_eq!(iter.next(), Some(&mut [1, 1, 1][..]));
assert_eq!(iter.next_back(), Some(&mut [0][..]));
assert_eq!(iter.next(), Some(&mut [3, 3][..]));


@ -958,23 +958,35 @@ fn test_append() {
#[test]
fn test_split_off() {
let mut vec = vec![1, 2, 3, 4, 5, 6];
let orig_ptr = vec.as_ptr();
let orig_capacity = vec.capacity();
let vec2 = vec.split_off(4);
let split_off = vec.split_off(4);
assert_eq!(vec, [1, 2, 3, 4]);
assert_eq!(vec2, [5, 6]);
assert_eq!(split_off, [5, 6]);
assert_eq!(vec.capacity(), orig_capacity);
assert_eq!(vec.as_ptr(), orig_ptr);
}
#[test]
fn test_split_off_take_all() {
let mut vec = vec![1, 2, 3, 4, 5, 6];
// Allocate enough capacity that we can tell whether the split-off vector's
// capacity is based on its size, or (incorrectly) on the original capacity.
let mut vec = Vec::with_capacity(1000);
vec.extend([1, 2, 3, 4, 5, 6]);
let orig_ptr = vec.as_ptr();
let orig_capacity = vec.capacity();
let vec2 = vec.split_off(0);
let split_off = vec.split_off(0);
assert_eq!(vec, []);
assert_eq!(vec2, [1, 2, 3, 4, 5, 6]);
assert_eq!(split_off, [1, 2, 3, 4, 5, 6]);
assert_eq!(vec.capacity(), orig_capacity);
assert_eq!(vec2.as_ptr(), orig_ptr);
assert_eq!(vec.as_ptr(), orig_ptr);
// The split-off vector should be newly-allocated, and should not have
// stolen the original vector's allocation.
assert!(split_off.capacity() < orig_capacity);
assert_ne!(split_off.as_ptr(), orig_ptr);
}
#[test]
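A hedged sketch of what the behaviour change means for callers (capacity numbers are illustrative): after #119917, `split_off(0)` copies the elements into a fresh allocation and leaves the original buffer with `self`; callers that relied on the old zero-copy special case can reproduce it explicitly with `mem::replace`, which is what the removed code did internally.

```rust
use std::mem;

fn main() {
    let mut v = Vec::with_capacity(100);
    v.extend([1, 2, 3]);
    let (ptr, cap) = (v.as_ptr(), v.capacity());

    // After this change, `split_off(0)` allocates a new vector for the
    // returned elements and leaves `v`'s buffer (and capacity) in place.
    let tail = v.split_off(0);
    assert_eq!(tail, [1, 2, 3]);
    assert!(v.is_empty());
    assert_eq!((v.as_ptr(), v.capacity()), (ptr, cap));

    // Reproducing the old behaviour explicitly: take the buffer, leave an
    // empty vector with the same capacity behind.
    let mut v = Vec::with_capacity(100);
    v.extend([1, 2, 3]);
    let taken = mem::replace(&mut v, Vec::with_capacity(100));
    assert_eq!(taken, [1, 2, 3]);
    assert!(v.is_empty());
}
```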


@ -3248,26 +3248,26 @@ unsafe impl<'a, T> TrustedRandomAccessNoCoerce for IterMut<'a, T> {
/// An iterator over slice in (non-overlapping) chunks separated by a predicate.
///
/// This struct is created by the [`group_by`] method on [slices].
/// This struct is created by the [`chunk_by`] method on [slices].
///
/// [`group_by`]: slice::group_by
/// [`chunk_by`]: slice::chunk_by
/// [slices]: slice
#[unstable(feature = "slice_group_by", issue = "80552")]
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct GroupBy<'a, T: 'a, P> {
pub struct ChunkBy<'a, T: 'a, P> {
slice: &'a [T],
predicate: P,
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> GroupBy<'a, T, P> {
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
impl<'a, T: 'a, P> ChunkBy<'a, T, P> {
pub(super) fn new(slice: &'a [T], predicate: P) -> Self {
GroupBy { slice, predicate }
ChunkBy { slice, predicate }
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> Iterator for GroupBy<'a, T, P>
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
impl<'a, T: 'a, P> Iterator for ChunkBy<'a, T, P>
where
P: FnMut(&T, &T) -> bool,
{
@ -3300,8 +3300,8 @@ fn last(mut self) -> Option<Self::Item> {
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> DoubleEndedIterator for GroupBy<'a, T, P>
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
impl<'a, T: 'a, P> DoubleEndedIterator for ChunkBy<'a, T, P>
where
P: FnMut(&T, &T) -> bool,
{
@ -3322,39 +3322,39 @@ fn next_back(&mut self) -> Option<Self::Item> {
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> FusedIterator for GroupBy<'a, T, P> where P: FnMut(&T, &T) -> bool {}
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
impl<'a, T: 'a, P> FusedIterator for ChunkBy<'a, T, P> where P: FnMut(&T, &T) -> bool {}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for GroupBy<'a, T, P> {
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for ChunkBy<'a, T, P> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GroupBy").field("slice", &self.slice).finish()
f.debug_struct("ChunkBy").field("slice", &self.slice).finish()
}
}
/// An iterator over slice in (non-overlapping) mutable chunks separated
/// by a predicate.
///
/// This struct is created by the [`group_by_mut`] method on [slices].
/// This struct is created by the [`chunk_by_mut`] method on [slices].
///
/// [`group_by_mut`]: slice::group_by_mut
/// [`chunk_by_mut`]: slice::chunk_by_mut
/// [slices]: slice
#[unstable(feature = "slice_group_by", issue = "80552")]
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct GroupByMut<'a, T: 'a, P> {
pub struct ChunkByMut<'a, T: 'a, P> {
slice: &'a mut [T],
predicate: P,
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> GroupByMut<'a, T, P> {
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
impl<'a, T: 'a, P> ChunkByMut<'a, T, P> {
pub(super) fn new(slice: &'a mut [T], predicate: P) -> Self {
GroupByMut { slice, predicate }
ChunkByMut { slice, predicate }
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> Iterator for GroupByMut<'a, T, P>
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
impl<'a, T: 'a, P> Iterator for ChunkByMut<'a, T, P>
where
P: FnMut(&T, &T) -> bool,
{
@ -3388,8 +3388,8 @@ fn last(mut self) -> Option<Self::Item> {
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> DoubleEndedIterator for GroupByMut<'a, T, P>
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
impl<'a, T: 'a, P> DoubleEndedIterator for ChunkByMut<'a, T, P>
where
P: FnMut(&T, &T) -> bool,
{
@ -3411,12 +3411,12 @@ fn next_back(&mut self) -> Option<Self::Item> {
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> FusedIterator for GroupByMut<'a, T, P> where P: FnMut(&T, &T) -> bool {}
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
impl<'a, T: 'a, P> FusedIterator for ChunkByMut<'a, T, P> where P: FnMut(&T, &T) -> bool {}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for GroupByMut<'a, T, P> {
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for ChunkByMut<'a, T, P> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GroupByMut").field("slice", &self.slice).finish()
f.debug_struct("ChunkByMut").field("slice", &self.slice).finish()
}
}


@ -68,8 +68,8 @@
#[unstable(feature = "array_windows", issue = "75027")]
pub use iter::ArrayWindows;
#[unstable(feature = "slice_group_by", issue = "80552")]
pub use iter::{GroupBy, GroupByMut};
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
pub use iter::{ChunkBy, ChunkByMut};
#[stable(feature = "split_inclusive", since = "1.51.0")]
pub use iter::{SplitInclusive, SplitInclusiveMut};
@ -1748,18 +1748,16 @@ pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T>
/// Returns an iterator over the slice producing non-overlapping runs
/// of elements using the predicate to separate them.
///
/// The predicate is called on two elements following themselves,
/// it means the predicate is called on `slice[0]` and `slice[1]`
/// then on `slice[1]` and `slice[2]` and so on.
/// The predicate is called for every pair of consecutive elements,
/// meaning that it is called on `slice[0]` and `slice[1]`,
/// followed by `slice[1]` and `slice[2]`, and so on.
///
/// # Examples
///
/// ```
/// #![feature(slice_group_by)]
///
/// let slice = &[1, 1, 1, 3, 3, 2, 2, 2];
///
/// let mut iter = slice.group_by(|a, b| a == b);
/// let mut iter = slice.chunk_by(|a, b| a == b);
///
/// assert_eq!(iter.next(), Some(&[1, 1, 1][..]));
/// assert_eq!(iter.next(), Some(&[3, 3][..]));
@ -1770,41 +1768,37 @@ pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T>
/// This method can be used to extract the sorted subslices:
///
/// ```
/// #![feature(slice_group_by)]
///
/// let slice = &[1, 1, 2, 3, 2, 3, 2, 3, 4];
///
/// let mut iter = slice.group_by(|a, b| a <= b);
/// let mut iter = slice.chunk_by(|a, b| a <= b);
///
/// assert_eq!(iter.next(), Some(&[1, 1, 2, 3][..]));
/// assert_eq!(iter.next(), Some(&[2, 3][..]));
/// assert_eq!(iter.next(), Some(&[2, 3, 4][..]));
/// assert_eq!(iter.next(), None);
/// ```
#[unstable(feature = "slice_group_by", issue = "80552")]
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
#[inline]
pub fn group_by<F>(&self, pred: F) -> GroupBy<'_, T, F>
pub fn chunk_by<F>(&self, pred: F) -> ChunkBy<'_, T, F>
where
F: FnMut(&T, &T) -> bool,
{
GroupBy::new(self, pred)
ChunkBy::new(self, pred)
}
/// Returns an iterator over the slice producing non-overlapping mutable
/// runs of elements using the predicate to separate them.
///
/// The predicate is called on two elements following themselves,
/// it means the predicate is called on `slice[0]` and `slice[1]`
/// then on `slice[1]` and `slice[2]` and so on.
/// The predicate is called for every pair of consecutive elements,
/// meaning that it is called on `slice[0]` and `slice[1]`,
/// followed by `slice[1]` and `slice[2]`, and so on.
///
/// # Examples
///
/// ```
/// #![feature(slice_group_by)]
///
/// let slice = &mut [1, 1, 1, 3, 3, 2, 2, 2];
///
/// let mut iter = slice.group_by_mut(|a, b| a == b);
/// let mut iter = slice.chunk_by_mut(|a, b| a == b);
///
/// assert_eq!(iter.next(), Some(&mut [1, 1, 1][..]));
/// assert_eq!(iter.next(), Some(&mut [3, 3][..]));
@ -1815,24 +1809,22 @@ pub fn group_by<F>(&self, pred: F) -> GroupBy<'_, T, F>
/// This method can be used to extract the sorted subslices:
///
/// ```
/// #![feature(slice_group_by)]
///
/// let slice = &mut [1, 1, 2, 3, 2, 3, 2, 3, 4];
///
/// let mut iter = slice.group_by_mut(|a, b| a <= b);
/// let mut iter = slice.chunk_by_mut(|a, b| a <= b);
///
/// assert_eq!(iter.next(), Some(&mut [1, 1, 2, 3][..]));
/// assert_eq!(iter.next(), Some(&mut [2, 3][..]));
/// assert_eq!(iter.next(), Some(&mut [2, 3, 4][..]));
/// assert_eq!(iter.next(), None);
/// ```
#[unstable(feature = "slice_group_by", issue = "80552")]
#[stable(feature = "slice_group_by", since = "CURRENT_RUSTC_VERSION")]
#[inline]
pub fn group_by_mut<F>(&mut self, pred: F) -> GroupByMut<'_, T, F>
pub fn chunk_by_mut<F>(&mut self, pred: F) -> ChunkByMut<'_, T, F>
where
F: FnMut(&T, &T) -> bool,
{
GroupByMut::new(self, pred)
ChunkByMut::new(self, pred)
}
/// Divides one slice into two at an index.
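Since the methods and their iterators are stable after this change, they can be used without a feature gate on a toolchain that includes this stabilization. A small usage sketch on top of the doc examples above (the `run_length_encode` helper is illustrative):

```rust
// Runs of equal elements become (value, length) pairs.
fn run_length_encode(data: &[u8]) -> Vec<(u8, usize)> {
    data.chunk_by(|a, b| a == b)
        .map(|run| (run[0], run.len()))
        .collect()
}

fn main() {
    assert_eq!(
        run_length_encode(&[1, 1, 1, 3, 3, 2, 2, 2, 1, 0]),
        [(1, 3), (3, 2), (2, 1), (1, 1), (0, 1)]
    );
}
```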


@ -101,7 +101,6 @@
#![cfg_attr(target_has_atomic = "128", feature(integer_atomics))]
#![cfg_attr(test, feature(cfg_match))]
#![feature(int_roundings)]
#![feature(slice_group_by)]
#![feature(split_array)]
#![feature(strict_provenance)]
#![feature(strict_provenance_atomic_ptr)]


@ -816,12 +816,12 @@ pub fn into_inner(self) -> Option<Box<dyn error::Error + Send + Sync>> {
}
}
/// Attempt to downgrade the inner error to `E` if any.
/// Attempt to downcast the inner error to `E` if any.
///
/// If this [`Error`] was constructed via [`new`] then this function will
/// attempt to perform downgrade on it, otherwise it will return [`Err`].
///
/// If downgrade succeeds, it will return [`Ok`], otherwise it will also
/// If the downcast succeeds, it will return [`Ok`], otherwise it will also
/// return [`Err`].
///
/// [`new`]: Error::new
@ -852,13 +852,39 @@ pub fn into_inner(self) -> Option<Box<dyn error::Error + Send + Sync>> {
/// impl From<io::Error> for E {
/// fn from(err: io::Error) -> E {
/// err.downcast::<E>()
/// .map(|b| *b)
/// .unwrap_or_else(E::Io)
/// }
/// }
///
/// impl From<E> for io::Error {
/// fn from(err: E) -> io::Error {
/// match err {
/// E::Io(io_error) => io_error,
/// e => io::Error::new(io::ErrorKind::Other, e),
/// }
/// }
/// }
///
/// # fn main() {
/// let e = E::SomeOtherVariant;
/// // Convert it to an io::Error
/// let io_error = io::Error::from(e);
/// // Cast it back to the original variant
/// let e = E::from(io_error);
/// assert!(matches!(e, E::SomeOtherVariant));
///
/// let io_error = io::Error::from(io::ErrorKind::AlreadyExists);
/// // Convert it to E
/// let e = E::from(io_error);
/// // Cast it back to the original variant
/// let io_error = io::Error::from(e);
/// assert_eq!(io_error.kind(), io::ErrorKind::AlreadyExists);
/// assert!(io_error.get_ref().is_none());
/// assert!(io_error.raw_os_error().is_none());
/// # }
/// ```
#[unstable(feature = "io_error_downcast", issue = "99262")]
pub fn downcast<E>(self) -> result::Result<Box<E>, Self>
pub fn downcast<E>(self) -> result::Result<E, Self>
where
E: error::Error + Send + Sync + 'static,
{
@ -872,7 +898,7 @@ pub fn downcast<E>(self) -> result::Result<Box<E>, Self>
// And the compiler should be able to eliminate the branch
// that produces `Err` here since b.error.is::<E>()
// returns true.
Ok(res.unwrap())
Ok(*res.unwrap())
}
repr_data => Err(Self { repr: Repr::new(repr_data) }),
}
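A minimal nightly-only sketch of the updated signature (the `AppError` type is made up for illustration): `downcast` now hands back the concrete error by value rather than boxed, matching the updated test expectations in the next hunk.

```rust
#![feature(io_error_downcast)] // still unstable: tracking issue #99262

use std::fmt;
use std::io;

#[derive(Debug)]
struct AppError;

impl fmt::Display for AppError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("application error")
    }
}

impl std::error::Error for AppError {}

fn main() {
    let io_error = io::Error::new(io::ErrorKind::Other, AppError);
    // Before #120117 this was `Result<Box<AppError>, io::Error>`;
    // now the box is gone and the value comes back directly.
    let err: AppError = io_error.downcast().unwrap();
    println!("recovered: {err}");
}
```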


@ -157,7 +157,7 @@ impl error::Error for E {}
fn test_std_io_error_downcast() {
// Case 1: custom error, downcast succeeds
let io_error = Error::new(ErrorKind::Other, Bojji(true));
let e: Box<Bojji> = io_error.downcast().unwrap();
let e: Bojji = io_error.downcast().unwrap();
assert!(e.0);
// Case 2: custom error, downcast fails
@ -166,7 +166,7 @@ fn test_std_io_error_downcast() {
// ensures that the custom error is intact
assert_eq!(ErrorKind::Other, io_error.kind());
let e: Box<Bojji> = io_error.downcast().unwrap();
let e: Bojji = io_error.downcast().unwrap();
assert!(e.0);
// Case 3: os error


@ -5606,6 +5606,7 @@ Released 2018-09-13
[`suspicious_else_formatting`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_else_formatting
[`suspicious_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_map
[`suspicious_op_assign_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_op_assign_impl
[`suspicious_open_options`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_open_options
[`suspicious_operation_groupings`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_operation_groupings
[`suspicious_splitn`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_splitn
[`suspicious_to_owned`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_to_owned
@ -5814,6 +5815,7 @@ Released 2018-09-13
[`absolute-paths-max-segments`]: https://doc.rust-lang.org/clippy/lint_configuration.html#absolute-paths-max-segments
[`absolute-paths-allowed-crates`]: https://doc.rust-lang.org/clippy/lint_configuration.html#absolute-paths-allowed-crates
[`allowed-dotfiles`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allowed-dotfiles
[`allowed-duplicate-crates`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allowed-duplicate-crates
[`enforce-iter-loop-reborrow`]: https://doc.rust-lang.org/clippy/lint_configuration.html#enforce-iter-loop-reborrow
[`check-private-items`]: https://doc.rust-lang.org/clippy/lint_configuration.html#check-private-items
[`pub-underscore-fields-behavior`]: https://doc.rust-lang.org/clippy/lint_configuration.html#pub-underscore-fields-behavior


@ -212,7 +212,7 @@ default configuration of Clippy. By default, any configuration will replace the
* `doc-valid-idents = ["ClipPy"]` would replace the default list with `["ClipPy"]`.
* `doc-valid-idents = ["ClipPy", ".."]` would append `ClipPy` to the default list.
**Default Value:** `["KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "DirectX", "ECMAScript", "GPLv2", "GPLv3", "GitHub", "GitLab", "IPv4", "IPv6", "ClojureScript", "CoffeeScript", "JavaScript", "PureScript", "TypeScript", "WebAssembly", "NaN", "NaNs", "OAuth", "GraphQL", "OCaml", "OpenGL", "OpenMP", "OpenSSH", "OpenSSL", "OpenStreetMap", "OpenDNS", "WebGL", "WebGL2", "WebGPU", "TensorFlow", "TrueType", "iOS", "macOS", "FreeBSD", "TeX", "LaTeX", "BibTeX", "BibLaTeX", "MinGW", "CamelCase"]`
**Default Value:** `["KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "DirectX", "ECMAScript", "GPLv2", "GPLv3", "GitHub", "GitLab", "IPv4", "IPv6", "ClojureScript", "CoffeeScript", "JavaScript", "PureScript", "TypeScript", "WebAssembly", "NaN", "NaNs", "OAuth", "GraphQL", "OCaml", "OpenDNS", "OpenGL", "OpenMP", "OpenSSH", "OpenSSL", "OpenStreetMap", "OpenTelemetry", "WebGL", "WebGL2", "WebGPU", "TensorFlow", "TrueType", "iOS", "macOS", "FreeBSD", "TeX", "LaTeX", "BibTeX", "BibLaTeX", "MinGW", "CamelCase"]`
---
**Affected lints:**
@ -768,7 +768,19 @@ Additional dotfiles (files or directories starting with a dot) to allow
* [`path_ends_with_ext`](https://rust-lang.github.io/rust-clippy/master/index.html#path_ends_with_ext)
## `allowed-duplicate-crates`
A list of crate names to allow duplicates of
**Default Value:** `[]`
---
**Affected lints:**
* [`multiple_crate_versions`](https://rust-lang.github.io/rust-clippy/master/index.html#multiple_crate_versions)
## `enforce-iter-loop-reborrow`
Whether to recommend using implicit into iter for reborrowed values.
#### Example
```no_run
let mut vec = vec![1, 2, 3];
@ -793,7 +805,7 @@ for _ in &mut *rmvec {}
## `check-private-items`
Whether to also run the listed lints on private items.
**Default Value:** `false`
@ -806,9 +818,10 @@ for _ in &mut *rmvec {}
## `pub-underscore-fields-behavior`
Lint "public" fields in a struct that are prefixed with an underscore based on their
exported visibility, or whether they are marked as "pub".
**Default Value:** `"PublicallyExported"`
**Default Value:** `"PubliclyExported"`
---
**Affected lints:**


@ -2,7 +2,9 @@
use crate::types::{DisallowedPath, MacroMatcher, MatchLintBehaviour, PubUnderscoreFieldsBehaviour, Rename};
use crate::ClippyConfiguration;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_session::Session;
use rustc_span::edit_distance::edit_distance;
use rustc_span::{BytePos, Pos, SourceFile, Span, SyntaxContext};
use serde::de::{IgnoredAny, IntoDeserializer, MapAccess, Visitor};
use serde::{Deserialize, Deserializer, Serialize};
@ -26,7 +28,7 @@
"NaN", "NaNs",
"OAuth", "GraphQL",
"OCaml",
"OpenGL", "OpenMP", "OpenSSH", "OpenSSL", "OpenStreetMap", "OpenDNS",
"OpenDNS", "OpenGL", "OpenMP", "OpenSSH", "OpenSSL", "OpenStreetMap", "OpenTelemetry",
"WebGL", "WebGL2", "WebGPU",
"TensorFlow",
"TrueType",
@ -59,18 +61,25 @@ fn from_toml_error(file: &SourceFile, error: &toml::de::Error) -> Self {
#[derive(Debug)]
struct ConfError {
message: String,
suggestion: Option<Suggestion>,
span: Span,
}
impl ConfError {
fn from_toml(file: &SourceFile, error: &toml::de::Error) -> Self {
let span = error.span().unwrap_or(0..file.source_len.0 as usize);
Self::spanned(file, error.message(), span)
Self::spanned(file, error.message(), None, span)
}
fn spanned(file: &SourceFile, message: impl Into<String>, span: Range<usize>) -> Self {
fn spanned(
file: &SourceFile,
message: impl Into<String>,
suggestion: Option<Suggestion>,
span: Range<usize>,
) -> Self {
Self {
message: message.into(),
suggestion,
span: Span::new(
file.start_pos + BytePos::from_usize(span.start),
file.start_pos + BytePos::from_usize(span.end),
@ -147,16 +156,18 @@ fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error> where V: MapA
match Field::deserialize(name.get_ref().as_str().into_deserializer()) {
Err(e) => {
let e: FieldError = e;
errors.push(ConfError::spanned(self.0, e.0, name.span()));
errors.push(ConfError::spanned(self.0, e.error, e.suggestion, name.span()));
}
$(Ok(Field::$name) => {
$(warnings.push(ConfError::spanned(self.0, format!("deprecated field `{}`. {}", name.get_ref(), $dep), name.span()));)?
$(warnings.push(ConfError::spanned(self.0, format!("deprecated field `{}`. {}", name.get_ref(), $dep), None, name.span()));)?
let raw_value = map.next_value::<toml::Spanned<toml::Value>>()?;
let value_span = raw_value.span();
match <$ty>::deserialize(raw_value.into_inner()) {
Err(e) => errors.push(ConfError::spanned(self.0, e.to_string().replace('\n', " ").trim(), value_span)),
Err(e) => errors.push(ConfError::spanned(self.0, e.to_string().replace('\n', " ").trim(), None, value_span)),
Ok(value) => match $name {
Some(_) => errors.push(ConfError::spanned(self.0, format!("duplicate field `{}`", name.get_ref()), name.span())),
Some(_) => {
errors.push(ConfError::spanned(self.0, format!("duplicate field `{}`", name.get_ref()), None, name.span()));
}
None => {
$name = Some(value);
// $new_conf is the same as one of the defined `$name`s, so
@ -165,7 +176,7 @@ fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error> where V: MapA
Some(_) => errors.push(ConfError::spanned(self.0, concat!(
"duplicate field `", stringify!($new_conf),
"` (provided as `", stringify!($name), "`)"
), name.span())),
), None, name.span())),
None => $new_conf = $name.clone(),
})?
},
@ -523,7 +534,11 @@ pub fn get_configuration_metadata() -> Vec<ClippyConfiguration> {
///
/// Additional dotfiles (files or directories starting with a dot) to allow
(allowed_dotfiles: FxHashSet<String> = FxHashSet::default()),
/// Lint: EXPLICIT_ITER_LOOP
/// Lint: MULTIPLE_CRATE_VERSIONS.
///
/// A list of crate names to allow duplicates of
(allowed_duplicate_crates: FxHashSet<String> = FxHashSet::default()),
/// Lint: EXPLICIT_ITER_LOOP.
///
/// Whether to recommend using implicit into iter for reborrowed values.
///
@ -543,15 +558,15 @@ pub fn get_configuration_metadata() -> Vec<ClippyConfiguration> {
/// for _ in &mut *rmvec {}
/// ```
(enforce_iter_loop_reborrow: bool = false),
/// Lint: MISSING_SAFETY_DOC, UNNECESSARY_SAFETY_DOC, MISSING_PANICS_DOC, MISSING_ERRORS_DOC
/// Lint: MISSING_SAFETY_DOC, UNNECESSARY_SAFETY_DOC, MISSING_PANICS_DOC, MISSING_ERRORS_DOC.
///
/// Whether to also run the listed lints on private items.
(check_private_items: bool = false),
/// Lint: PUB_UNDERSCORE_FIELDS
/// Lint: PUB_UNDERSCORE_FIELDS.
///
/// Lint "public" fields in a struct that are prefixed with an underscore based on their
/// exported visibility, or whether they are marked as "pub".
(pub_underscore_fields_behavior: PubUnderscoreFieldsBehaviour = PubUnderscoreFieldsBehaviour::PublicallyExported),
(pub_underscore_fields_behavior: PubUnderscoreFieldsBehaviour = PubUnderscoreFieldsBehaviour::PubliclyExported),
}
/// Search for the configuration file.
@ -669,10 +684,16 @@ fn read_inner(sess: &Session, path: &io::Result<(Option<PathBuf>, Vec<String>)>)
// all conf errors are non-fatal, we just use the default conf in case of error
for error in errors {
sess.dcx().span_err(
let mut diag = sess.dcx().struct_span_err(
error.span,
format!("error reading Clippy's configuration file: {}", error.message),
);
if let Some(sugg) = error.suggestion {
diag.span_suggestion(error.span, sugg.message, sugg.suggestion, Applicability::MaybeIncorrect);
}
diag.emit();
}
for warning in warnings {
@ -689,19 +710,31 @@ fn read_inner(sess: &Session, path: &io::Result<(Option<PathBuf>, Vec<String>)>)
const SEPARATOR_WIDTH: usize = 4;
#[derive(Debug)]
struct FieldError(String);
struct FieldError {
error: String,
suggestion: Option<Suggestion>,
}
#[derive(Debug)]
struct Suggestion {
message: &'static str,
suggestion: &'static str,
}
impl std::error::Error for FieldError {}
impl Display for FieldError {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.pad(&self.0)
f.pad(&self.error)
}
}
impl serde::de::Error for FieldError {
fn custom<T: Display>(msg: T) -> Self {
Self(msg.to_string())
Self {
error: msg.to_string(),
suggestion: None,
}
}
fn unknown_field(field: &str, expected: &'static [&'static str]) -> Self {
@ -723,7 +756,20 @@ fn unknown_field(field: &str, expected: &'static [&'static str]) -> Self {
write!(msg, "{:SEPARATOR_WIDTH$}{field:column_width$}", " ").unwrap();
}
}
Self(msg)
let suggestion = expected
.iter()
.filter_map(|expected| {
let dist = edit_distance(field, expected, 4)?;
Some((dist, expected))
})
.min_by_key(|&(dist, _)| dist)
.map(|(_, suggestion)| Suggestion {
message: "perhaps you meant",
suggestion,
});
Self { error: msg, suggestion }
}
}
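
The new `unknown_field` path above attaches a "perhaps you meant" suggestion by picking the expected field name with the smallest edit distance and discarding candidates more than four edits away. A minimal standalone sketch of that selection step, with a plain Levenshtein distance standing in for `rustc_span`'s bounded `edit_distance` helper and purely illustrative field names:

```rust
// Sketch of the "perhaps you meant" selection, assuming a plain Levenshtein
// distance in place of `rustc_span`'s `edit_distance` (which additionally
// returns `None` once the limit of 4 is exceeded).
fn levenshtein(a: &str, b: &str) -> usize {
    let b_len = b.chars().count();
    let mut prev: Vec<usize> = (0..=b_len).collect();
    for (i, ca) in a.chars().enumerate() {
        let mut cur = vec![i + 1; b_len + 1];
        for (j, cb) in b.chars().enumerate() {
            let sub = prev[j] + usize::from(ca != cb);
            cur[j + 1] = sub.min(prev[j + 1] + 1).min(cur[j] + 1);
        }
        prev = cur;
    }
    prev[b_len]
}

// Pick the closest known field, or nothing if every candidate is too far off.
fn suggest(field: &str, expected: &[&'static str]) -> Option<&'static str> {
    expected
        .iter()
        .filter_map(|&candidate| {
            let dist = levenshtein(field, candidate);
            (dist <= 4).then_some((dist, candidate))
        })
        .min_by_key(|&(dist, _)| dist)
        .map(|(_, candidate)| candidate)
}

fn main() {
    // Illustrative candidates only; the real list comes from the conf macro's
    // known field names.
    let expected = ["allowed-duplicate-crates", "allowed-dotfiles"];
    assert_eq!(suggest("alowed-duplicate-crates", &expected), Some("allowed-duplicate-crates"));
    assert_eq!(suggest("completely-different", &expected), None);
}
```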

View file

@ -11,6 +11,7 @@
extern crate rustc_data_structures;
#[allow(unused_extern_crates)]
extern crate rustc_driver;
extern crate rustc_errors;
extern crate rustc_session;
extern crate rustc_span;

View file

@ -129,6 +129,6 @@ fn serialize<S>(&self, _serializer: S) -> Result<S::Ok, S::Error>
#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize, Serialize)]
pub enum PubUnderscoreFieldsBehaviour {
PublicallyExported,
PubliclyExported,
AllPubFields,
}

View file

@ -4,11 +4,11 @@ version = "0.0.1"
edition = "2021"
[dependencies]
aho-corasick = "0.7"
aho-corasick = "1.0"
clap = "4.1.4"
indoc = "1.0"
itertools = "0.11"
opener = "0.5"
opener = "0.6"
shell-escape = "0.1"
walkdir = "2.3"

View file

@ -504,9 +504,8 @@ fn is_ident_char(c: u8) -> bool {
}
let searcher = AhoCorasickBuilder::new()
.dfa(true)
.match_kind(aho_corasick::MatchKind::LeftmostLongest)
.build_with_size::<u16, _, _>(replacements.iter().map(|&(x, _)| x.as_bytes()))
.build(replacements.iter().map(|&(x, _)| x.as_bytes()))
.unwrap();
let mut result = String::with_capacity(contents.len() + 1024);
@ -928,7 +927,7 @@ fn remove_line_splices(s: &str) -> String {
.and_then(|s| s.strip_suffix('"'))
.unwrap_or_else(|| panic!("expected quoted string, found `{s}`"));
let mut res = String::with_capacity(s.len());
unescape::unescape_literal(s, unescape::Mode::Str, &mut |range, ch| {
unescape::unescape_unicode(s, unescape::Mode::Str, &mut |range, ch| {
if ch.is_ok() {
res.push_str(&s[range]);
}

View file

@ -10,7 +10,7 @@ edition = "2021"
[dependencies]
arrayvec = { version = "0.7", default-features = false }
cargo_metadata = "0.15.3"
cargo_metadata = "0.18"
clippy_config = { path = "../clippy_config" }
clippy_utils = { path = "../clippy_utils" }
declare_clippy_lint = { path = "../declare_clippy_lint" }

View file

@ -14,10 +14,10 @@
/// This lint warns when you use `Arc` with a type that does not implement `Send` or `Sync`.
///
/// ### Why is this bad?
/// `Arc<T>` is an Atomic `RC<T>` and guarantees that updates to the reference counter are
/// Atomic. This is useful in multiprocessing scenarios. To send an `Arc<T>` across processes
/// and make use of the atomic ref counter, `T` must be [both `Send` and `Sync`](https://doc.rust-lang.org/std/sync/struct.Arc.html#impl-Send-for-Arc%3CT%3E),
/// either `T` should be made `Send + Sync` or an `Rc` should be used instead of an `Arc`
/// `Arc<T>` is a thread-safe `Rc<T>` and guarantees that updates to the reference counter
/// use atomic operations. To send an `Arc<T>` across thread boundaries and
/// share ownership between multiple threads, `T` must be [both `Send` and `Sync`](https://doc.rust-lang.org/std/sync/struct.Arc.html#thread-safety),
/// so either `T` should be made `Send + Sync` or an `Rc` should be used instead of an `Arc`
///
/// ### Example
/// ```no_run

View file

@ -67,6 +67,11 @@ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
);
if let ExprKind::Block(block, _) = &cond.kind {
if !block.span.eq_ctxt(expr.span) {
// If the block comes from a macro, or as an argument to a macro,
// do not lint.
return;
}
if block.rules == BlockCheckMode::DefaultBlock {
if block.stmts.is_empty() {
if let Some(ex) = &block.expr {

View file

@ -6,6 +6,7 @@
use cargo_metadata::MetadataCommand;
use clippy_utils::diagnostics::span_lint;
use clippy_utils::is_lint_allowed;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::hir_id::CRATE_HIR_ID;
use rustc_lint::{LateContext, LateLintPass, Lint};
use rustc_session::impl_lint_pass;
@ -128,6 +129,8 @@
/// ### Known problems
/// Because this can be caused purely by the dependencies
/// themselves, it's not always possible to fix this issue.
/// In those cases, you can allow that specific crate using
/// the `allowed_duplicate_crates` configuration option.
///
/// ### Example
/// ```toml
@ -163,6 +166,7 @@
}
pub struct Cargo {
pub allowed_duplicate_crates: FxHashSet<String>,
pub ignore_publish: bool,
}
@ -208,7 +212,7 @@ fn check_crate(&mut self, cx: &LateContext<'_>) {
{
match MetadataCommand::new().exec() {
Ok(metadata) => {
multiple_crate_versions::check(cx, &metadata);
multiple_crate_versions::check(cx, &metadata, &self.allowed_duplicate_crates);
},
Err(e) => {
for lint in WITH_DEPS_LINTS {

View file

@ -3,27 +3,40 @@
use cargo_metadata::{DependencyKind, Metadata, Node, Package, PackageId};
use clippy_utils::diagnostics::span_lint;
use itertools::Itertools;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_lint::LateContext;
use rustc_span::DUMMY_SP;
use super::MULTIPLE_CRATE_VERSIONS;
pub(super) fn check(cx: &LateContext<'_>, metadata: &Metadata) {
pub(super) fn check(cx: &LateContext<'_>, metadata: &Metadata, allowed_duplicate_crates: &FxHashSet<String>) {
let local_name = cx.tcx.crate_name(LOCAL_CRATE);
let mut packages = metadata.packages.clone();
packages.sort_by(|a, b| a.name.cmp(&b.name));
if let Some(resolve) = &metadata.resolve
&& let Some(local_id) = packages.iter().find_map(|p| {
if p.name == local_name.as_str() {
// p.name contains the original crate names with dashes intact
// local_name contains the crate name as a namespace, with the dashes converted to underscores
// the code below temporarily rectifies this discrepancy
if p.name
.as_bytes()
.iter()
.map(|b| if b == &b'-' { &b'_' } else { b })
.eq(local_name.as_str().as_bytes())
{
Some(&p.id)
} else {
None
}
})
{
for (name, group) in &packages.iter().group_by(|p| p.name.clone()) {
for (name, group) in &packages
.iter()
.filter(|p| !allowed_duplicate_crates.contains(&p.name))
.group_by(|p| &p.name)
{
let group: Vec<&Package> = group.collect();
if group.len() <= 1 {

View file

@ -439,6 +439,7 @@
crate::methods::STR_SPLIT_AT_NEWLINE_INFO,
crate::methods::SUSPICIOUS_COMMAND_ARG_SPACE_INFO,
crate::methods::SUSPICIOUS_MAP_INFO,
crate::methods::SUSPICIOUS_OPEN_OPTIONS_INFO,
crate::methods::SUSPICIOUS_SPLITN_INFO,
crate::methods::SUSPICIOUS_TO_OWNED_INFO,
crate::methods::TYPE_ID_ON_BOX_INFO,

View file

@ -1,6 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::last_path_segment;
use clippy_utils::source::snippet_with_context;
use clippy_utils::{last_path_segment, std_or_core};
use rustc_errors::Applicability;
use rustc_hir::{def, Expr, ExprKind, GenericArg, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass};
@ -42,12 +42,14 @@ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
&& ty.span.ctxt() == ctxt
{
let mut applicability = Applicability::MachineApplicable;
let sugg = make_sugg(cx, ty_path, ctxt, &mut applicability);
let Some(path) = std_or_core(cx) else { return };
let path = format!("{path}::iter::empty");
let sugg = make_sugg(cx, ty_path, ctxt, &mut applicability, &path);
span_lint_and_sugg(
cx,
DEFAULT_INSTEAD_OF_ITER_EMPTY,
expr.span,
"`std::iter::empty()` is the more idiomatic way",
&format!("`{path}()` is the more idiomatic way"),
"try",
sugg,
applicability,
@ -61,6 +63,7 @@ fn make_sugg(
ty_path: &rustc_hir::QPath<'_>,
ctxt: SyntaxContext,
applicability: &mut Applicability,
path: &str,
) -> String {
if let Some(last) = last_path_segment(ty_path).args
&& let Some(iter_ty) = last.args.iter().find_map(|arg| match arg {
@ -69,10 +72,10 @@ fn make_sugg(
})
{
format!(
"std::iter::empty::<{}>()",
"{path}::<{}>()",
snippet_with_context(cx, iter_ty.span, ctxt, "..", applicability).0
)
} else {
"std::iter::empty()".to_owned()
format!("{path}()")
}
}

View file

@ -1,10 +1,10 @@
use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::numeric_literal;
use clippy_utils::source::snippet_opt;
use clippy_utils::{get_parent_node, numeric_literal};
use rustc_ast::ast::{LitFloatType, LitIntType, LitKind};
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, walk_stmt, Visitor};
use rustc_hir::{Block, Body, Expr, ExprKind, FnRetTy, HirId, ItemKind, Lit, Node, Stmt, StmtKind};
use rustc_hir::{Block, Body, ConstContext, Expr, ExprKind, FnRetTy, HirId, Lit, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{self, FloatTy, IntTy, PolyFnSig, Ty};
@ -50,11 +50,11 @@
impl<'tcx> LateLintPass<'tcx> for DefaultNumericFallback {
fn check_body(&mut self, cx: &LateContext<'tcx>, body: &'tcx Body<'_>) {
let is_parent_const = if let Some(Node::Item(item)) = get_parent_node(cx.tcx, body.id().hir_id) {
matches!(item.kind, ItemKind::Const(..))
} else {
false
};
let hir = cx.tcx.hir();
let is_parent_const = matches!(
hir.body_const_context(hir.body_owner_def_id(body.id())),
Some(ConstContext::Const { inline: false } | ConstContext::Static(_))
);
let mut visitor = NumericFallbackVisitor::new(cx, is_parent_const);
visitor.visit_body(body);
}

View file

@ -1,6 +1,6 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_note, span_lint_and_sugg, span_lint_and_then};
use clippy_utils::ty::{implements_trait, implements_trait_with_env, is_copy};
use clippy_utils::{is_lint_allowed, match_def_path, paths};
use clippy_utils::{has_non_exhaustive_attr, is_lint_allowed, match_def_path, paths};
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{walk_expr, walk_fn, walk_item, FnKind, Visitor};
@ -450,6 +450,7 @@ fn check_partial_eq_without_eq<'tcx>(cx: &LateContext<'tcx>, span: Span, trait_r
&& let Some(eq_trait_def_id) = cx.tcx.get_diagnostic_item(sym::Eq)
&& let Some(def_id) = trait_ref.trait_def_id()
&& cx.tcx.is_diagnostic_item(sym::PartialEq, def_id)
&& !has_non_exhaustive_attr(cx.tcx, *adt)
&& let param_env = param_env_for_derived_eq(cx.tcx, adt.did(), eq_trait_def_id)
&& !implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, None, &[])
// If all of our fields implement `Eq`, we can implement `Eq` too

View file

@ -6,7 +6,7 @@
use rustc_ast::{CoroutineKind, Fn, FnRetTy, Item, ItemKind};
use rustc_data_structures::sync::Lrc;
use rustc_errors::emitter::HumanEmitter;
use rustc_errors::DiagCtxt;
use rustc_errors::{DiagCtxt, DiagnosticBuilder};
use rustc_lint::LateContext;
use rustc_parse::maybe_new_parser_from_source_str;
use rustc_parse::parser::ForceCollect;
@ -53,7 +53,7 @@ fn check_code_sample(code: String, edition: Edition, ignore: bool) -> (bool, Vec
let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code) {
Ok(p) => p,
Err(errs) => {
errs.into_iter().for_each(|err| err.cancel());
errs.into_iter().for_each(DiagnosticBuilder::cancel);
return (false, test_attr_spans);
},
};

View file

@ -6,8 +6,8 @@
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_path, Visitor};
use rustc_hir::{
GenericArg, GenericArgs, HirId, Impl, ImplItemKind, ImplItemRef, Item, ItemKind, PatKind, Path, PathSegment, Ty,
TyKind,
FnRetTy, GenericArg, GenericArgs, HirId, Impl, ImplItemKind, ImplItemRef, Item, ItemKind, PatKind, Path,
PathSegment, Ty, TyKind,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter::OnlyBodies;
@ -197,10 +197,13 @@ fn convert_to_from(
// fn into([mut] self) -> T -> fn into([mut] v: T) -> T
// ~~~~ ~~~~
(self_ident.span, format!("val: {from}")),
];
if let FnRetTy::Return(_) = sig.decl.output {
// fn into(self) -> T -> fn into(self) -> Self
// ~ ~~~~
(sig.decl.output.span(), String::from("Self")),
];
suggestions.push((sig.decl.output.span(), String::from("Self")));
}
let mut finder = SelfFinder {
cx,

View file

@ -53,7 +53,9 @@ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
// List of spans to lint. (lint_span, first_span)
let mut lint_spans = Vec::new();
let Ok(impls) = cx.tcx.crate_inherent_impls(()) else { return };
let Ok(impls) = cx.tcx.crate_inherent_impls(()) else {
return;
};
let inherent_impls = cx
.tcx
.with_stable_hashing_context(|hcx| impls.inherent_impls.to_sorted(&hcx, true));

View file

@ -574,6 +574,7 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) {
warn_on_all_wildcard_imports,
check_private_items,
pub_underscore_fields_behavior,
ref allowed_duplicate_crates,
blacklisted_names: _,
cyclomatic_complexity_threshold: _,
@ -719,7 +720,7 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) {
store.register_late_pass(|_| Box::new(needless_update::NeedlessUpdate));
store.register_late_pass(|_| Box::new(needless_borrowed_ref::NeedlessBorrowedRef));
store.register_late_pass(|_| Box::new(borrow_deref_ref::BorrowDerefRef));
store.register_late_pass(|_| Box::new(no_effect::NoEffect));
store.register_late_pass(|_| Box::<no_effect::NoEffect>::default());
store.register_late_pass(|_| Box::new(temporary_assignment::TemporaryAssignment));
store.register_late_pass(move |_| Box::new(transmute::Transmute::new(msrv())));
store.register_late_pass(move |_| {
@ -947,6 +948,7 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) {
store.register_late_pass(move |_| {
Box::new(cargo::Cargo {
ignore_publish: cargo_ignore_publish,
allowed_duplicate_crates: allowed_duplicate_crates.clone(),
})
});
store.register_early_pass(|| Box::new(crate_in_macro_def::CrateInMacroDef));

View file

@ -31,7 +31,7 @@ fn emit_lint(cx: &LateContext<'_>, vec: &Expr<'_>, pushed_item: &Expr<'_>, ctxt:
vec.span,
"it looks like the same item is being pushed into this Vec",
None,
&format!("try using vec![{item_str};SIZE] or {vec_str}.resize(NEW_SIZE, {item_str})"),
&format!("consider using vec![{item_str};SIZE] or {vec_str}.resize(NEW_SIZE, {item_str})"),
);
}

View file

@ -3,7 +3,7 @@
use clippy_utils::source::{snippet, snippet_with_applicability};
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::is_non_aggregate_primitive_type;
use clippy_utils::{is_default_equivalent, is_res_lang_ctor, path_res, peel_ref_operators};
use clippy_utils::{is_default_equivalent, is_res_lang_ctor, path_res, peel_ref_operators, std_or_core};
use rustc_errors::Applicability;
use rustc_hir::LangItem::OptionNone;
use rustc_hir::{Expr, ExprKind};
@ -128,6 +128,7 @@ fn check_replace_with_uninit(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'
// check if replacement is mem::MaybeUninit::uninit().assume_init()
&& cx.tcx.is_diagnostic_item(sym::assume_init, method_def_id)
{
let Some(top_crate) = std_or_core(cx) else { return };
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
@ -136,7 +137,7 @@ fn check_replace_with_uninit(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'
"replacing with `mem::MaybeUninit::uninit().assume_init()`",
"consider using",
format!(
"std::ptr::read({})",
"{top_crate}::ptr::read({})",
snippet_with_applicability(cx, dest.span, "", &mut applicability)
),
applicability,
@ -149,6 +150,7 @@ fn check_replace_with_uninit(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'
&& let Some(repl_def_id) = cx.qpath_res(repl_func_qpath, repl_func.hir_id).opt_def_id()
{
if cx.tcx.is_diagnostic_item(sym::mem_uninitialized, repl_def_id) {
let Some(top_crate) = std_or_core(cx) else { return };
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
@ -157,7 +159,7 @@ fn check_replace_with_uninit(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'
"replacing with `mem::uninitialized()`",
"consider using",
format!(
"std::ptr::read({})",
"{top_crate}::ptr::read({})",
snippet_with_applicability(cx, dest.span, "", &mut applicability)
),
applicability,
@ -184,14 +186,17 @@ fn check_replace_with_default(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<
return;
}
if is_default_equivalent(cx, src) && !in_external_macro(cx.tcx.sess, expr_span) {
let Some(top_crate) = std_or_core(cx) else { return };
span_lint_and_then(
cx,
MEM_REPLACE_WITH_DEFAULT,
expr_span,
"replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`",
&format!(
"replacing a value of type `T` with `T::default()` is better expressed using `{top_crate}::mem::take`"
),
|diag| {
if !expr_span.from_expansion() {
let suggestion = format!("std::mem::take({})", snippet(cx, dest.span, ""));
let suggestion = format!("{top_crate}::mem::take({})", snippet(cx, dest.span, ""));
diag.span_suggestion(
expr_span,

View file

@ -1,6 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
use clippy_utils::{get_expr_use_or_unification_node, is_no_std_crate, is_res_lang_ctor, path_res};
use clippy_utils::{get_expr_use_or_unification_node, is_res_lang_ctor, path_res, std_or_core};
use rustc_errors::Applicability;
use rustc_hir::LangItem::{OptionNone, OptionSome};
@ -58,10 +58,10 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, method_name: &str, re
return;
}
let Some(top_crate) = std_or_core(cx) else { return };
if let Some(i) = item {
let sugg = format!(
"{}::iter::once({}{})",
if is_no_std_crate(cx) { "core" } else { "std" },
"{top_crate}::iter::once({}{})",
iter_type.ref_prefix(),
snippet(cx, i.span, "...")
);
@ -81,11 +81,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, method_name: &str, re
expr.span,
&format!("`{method_name}` call on an empty collection"),
"try",
if is_no_std_crate(cx) {
"core::iter::empty()".to_string()
} else {
"std::iter::empty()".to_string()
},
format!("{top_crate}::iter::empty()"),
Applicability::MaybeIncorrect,
);
}

View file

@ -42,7 +42,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, recv: &'tcx Expr<'tcx>, join_a
)
.span_suggestion(
expr_span,
"if this is intentional, try using `Path::new` instead",
"if this is intentional, consider using `Path::new`",
format!("PathBuf::from({arg_str})"),
Applicability::Unspecified,
);

View file

@ -50,7 +50,7 @@ pub fn check(
super::MANUAL_SATURATING_ARITHMETIC,
expr.span,
"manual saturating arithmetic",
&format!("try using `saturating_{arith}`"),
&format!("consider using `saturating_{arith}`"),
format!(
"{}.saturating_{arith}({})",
snippet_with_applicability(cx, arith_lhs.span, "..", &mut applicability),

View file

@ -113,9 +113,15 @@ fn handle_path(
if let Some(path_def_id) = cx.qpath_res(qpath, arg.hir_id).opt_def_id()
&& match_def_path(cx, path_def_id, &paths::CLONE_TRAIT_METHOD)
{
// FIXME: It would be better to infer the type to check if it's copyable or not
// to suggest to use `.copied()` instead of `.cloned()` where applicable.
lint_path(cx, e.span, recv.span);
// The `copied` and `cloned` methods are only available on `&T` and `&mut T` in `Option`
// and `Result`.
if let ty::Adt(_, args) = cx.typeck_results().expr_ty(recv).kind()
&& let args = args.as_slice()
&& let Some(ty) = args.iter().find_map(|generic_arg| generic_arg.as_type())
&& ty.is_ref()
{
lint_path(cx, e.span, recv.span, is_copy(cx, ty.peel_refs()));
}
}
}
@ -139,17 +145,19 @@ fn lint_needless_cloning(cx: &LateContext<'_>, root: Span, receiver: Span) {
);
}
fn lint_path(cx: &LateContext<'_>, replace: Span, root: Span) {
fn lint_path(cx: &LateContext<'_>, replace: Span, root: Span, is_copy: bool) {
let mut applicability = Applicability::MachineApplicable;
let replacement = if is_copy { "copied" } else { "cloned" };
span_lint_and_sugg(
cx,
MAP_CLONE,
replace,
"you are explicitly cloning with `.map()`",
"consider calling the dedicated `cloned` method",
&format!("consider calling the dedicated `{replacement}` method"),
format!(
"{}.cloned()",
"{}.{replacement}()",
snippet_with_applicability(cx, root, "..", &mut applicability),
),
applicability,

View file

@ -2827,6 +2827,44 @@
"nonsensical combination of options for opening a file"
}
declare_clippy_lint! {
/// ### What it does
/// Checks for the suspicious use of `OpenOptions::create()`
/// without an explicit `OpenOptions::truncate()`.
///
/// ### Why is this bad?
/// `create()` alone will either create a new file or open an
/// existing file. If the file already exists, it will be
/// overwritten when written to, but the file will not be
/// truncated by default.
/// If less data is written to the file
/// than it already contains, the remainder of the file will
/// remain unchanged, and the end of the file will contain old
/// data.
/// In most cases, one should either use `create_new` to ensure
/// the file is created from scratch, or ensure `truncate` is
/// called so that the truncation behaviour is explicit. `truncate(true)`
/// will ensure the file is entirely overwritten with new data, whereas
/// `truncate(false)` will explicitly keep the default behavior.

///
/// ### Example
/// ```rust,no_run
/// use std::fs::OpenOptions;
///
/// OpenOptions::new().create(true);
/// ```
/// Use instead:
/// ```rust,no_run
/// use std::fs::OpenOptions;
///
/// OpenOptions::new().create(true).truncate(true);
/// ```
#[clippy::version = "1.75.0"]
pub SUSPICIOUS_OPEN_OPTIONS,
suspicious,
"suspicious combination of options for opening a file"
}
declare_clippy_lint! {
/// ### What it does
/// Checks for [push](https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.push)
@ -4033,6 +4071,7 @@ pub fn new(
MAP_ERR_IGNORE,
MUT_MUTEX_LOCK,
NONSENSICAL_OPEN_OPTIONS,
SUSPICIOUS_OPEN_OPTIONS,
PATH_BUF_PUSH_OVERWRITE,
RANGE_ZIP_WITH_LEN,
REPEAT_ONCE,

View file

@ -1,46 +1,74 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::ty::is_type_diagnostic_item;
use rustc_data_structures::fx::FxHashMap;
use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
use clippy_utils::ty::{is_type_diagnostic_item, match_type};
use clippy_utils::{match_any_def_paths, paths};
use rustc_ast::ast::LitKind;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::Ty;
use rustc_span::source_map::Spanned;
use rustc_span::{sym, Span};
use super::NONSENSICAL_OPEN_OPTIONS;
use super::{NONSENSICAL_OPEN_OPTIONS, SUSPICIOUS_OPEN_OPTIONS};
fn is_open_options(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
is_type_diagnostic_item(cx, ty, sym::FsOpenOptions) || match_type(cx, ty, &paths::TOKIO_IO_OPEN_OPTIONS)
}
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::FsOpenOptions)
&& is_open_options(cx, cx.tcx.type_of(impl_id).instantiate_identity())
{
let mut options = Vec::new();
get_open_options(cx, recv, &mut options);
check_open_options(cx, &options, e.span);
if get_open_options(cx, recv, &mut options) {
check_open_options(cx, &options, e.span);
}
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
#[derive(Eq, PartialEq, Clone, Debug)]
enum Argument {
True,
False,
Set(bool),
Unknown,
}
#[derive(Debug)]
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
enum OpenOption {
Write,
Append,
Create,
CreateNew,
Read,
Truncate,
Create,
Append,
Write,
}
impl std::fmt::Display for OpenOption {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
OpenOption::Append => write!(f, "append"),
OpenOption::Create => write!(f, "create"),
OpenOption::CreateNew => write!(f, "create_new"),
OpenOption::Read => write!(f, "read"),
OpenOption::Truncate => write!(f, "truncate"),
OpenOption::Write => write!(f, "write"),
}
}
}
fn get_open_options(cx: &LateContext<'_>, argument: &Expr<'_>, options: &mut Vec<(OpenOption, Argument)>) {
if let ExprKind::MethodCall(path, receiver, arguments, _) = argument.kind {
/// Collects information about a method call chain on `OpenOptions`.
/// Returns false if an unexpected expression kind was found "on the way",
/// and linting should then be avoided.
fn get_open_options(
cx: &LateContext<'_>,
argument: &Expr<'_>,
options: &mut Vec<(OpenOption, Argument, Span)>,
) -> bool {
if let ExprKind::MethodCall(path, receiver, arguments, span) = argument.kind {
let obj_ty = cx.typeck_results().expr_ty(receiver).peel_refs();
// Only proceed if this is a call on some object of type std::fs::OpenOptions
if is_type_diagnostic_item(cx, obj_ty, sym::FsOpenOptions) && !arguments.is_empty() {
if !arguments.is_empty() && is_open_options(cx, obj_ty) {
let argument_option = match arguments[0].kind {
ExprKind::Lit(span) => {
if let Spanned {
@ -48,11 +76,12 @@ fn get_open_options(cx: &LateContext<'_>, argument: &Expr<'_>, options: &mut Vec
..
} = span
{
if *lit { Argument::True } else { Argument::False }
Argument::Set(*lit)
} else {
// The function is called with a literal which is not a boolean literal.
// This is theoretically possible, but not very likely.
return;
// We'll ignore it for now
return get_open_options(cx, receiver, options);
}
},
_ => Argument::Unknown,
@ -60,106 +89,77 @@ fn get_open_options(cx: &LateContext<'_>, argument: &Expr<'_>, options: &mut Vec
match path.ident.as_str() {
"create" => {
options.push((OpenOption::Create, argument_option));
options.push((OpenOption::Create, argument_option, span));
},
"create_new" => {
options.push((OpenOption::CreateNew, argument_option, span));
},
"append" => {
options.push((OpenOption::Append, argument_option));
options.push((OpenOption::Append, argument_option, span));
},
"truncate" => {
options.push((OpenOption::Truncate, argument_option));
options.push((OpenOption::Truncate, argument_option, span));
},
"read" => {
options.push((OpenOption::Read, argument_option));
options.push((OpenOption::Read, argument_option, span));
},
"write" => {
options.push((OpenOption::Write, argument_option));
options.push((OpenOption::Write, argument_option, span));
},
_ => {
// Avoid linting altogether if this method is from a trait.
// This might be a user defined extension trait with a method like `truncate_write`
// which would be a false positive
if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(argument.hir_id)
&& cx.tcx.trait_of_item(method_def_id).is_some()
{
return false;
}
},
_ => (),
}
get_open_options(cx, receiver, options);
get_open_options(cx, receiver, options)
} else {
false
}
} else if let ExprKind::Call(callee, _) = argument.kind
&& let ExprKind::Path(path) = callee.kind
&& let Some(did) = cx.qpath_res(&path, callee.hir_id).opt_def_id()
{
match_any_def_paths(
cx,
did,
&[
&paths::TOKIO_IO_OPEN_OPTIONS_NEW,
&paths::OPEN_OPTIONS_NEW,
&paths::FILE_OPTIONS,
&paths::TOKIO_FILE_OPTIONS,
],
)
.is_some()
} else {
false
}
}
fn check_open_options(cx: &LateContext<'_>, options: &[(OpenOption, Argument)], span: Span) {
let (mut create, mut append, mut truncate, mut read, mut write) = (false, false, false, false, false);
let (mut create_arg, mut append_arg, mut truncate_arg, mut read_arg, mut write_arg) =
(false, false, false, false, false);
// This code is almost duplicated (oh, the irony), but I haven't found a way to
// unify it.
for option in options {
match *option {
(OpenOption::Create, arg) => {
if create {
span_lint(
cx,
NONSENSICAL_OPEN_OPTIONS,
span,
"the method `create` is called more than once",
);
} else {
create = true;
}
create_arg = create_arg || (arg == Argument::True);
},
(OpenOption::Append, arg) => {
if append {
span_lint(
cx,
NONSENSICAL_OPEN_OPTIONS,
span,
"the method `append` is called more than once",
);
} else {
append = true;
}
append_arg = append_arg || (arg == Argument::True);
},
(OpenOption::Truncate, arg) => {
if truncate {
span_lint(
cx,
NONSENSICAL_OPEN_OPTIONS,
span,
"the method `truncate` is called more than once",
);
} else {
truncate = true;
}
truncate_arg = truncate_arg || (arg == Argument::True);
},
(OpenOption::Read, arg) => {
if read {
span_lint(
cx,
NONSENSICAL_OPEN_OPTIONS,
span,
"the method `read` is called more than once",
);
} else {
read = true;
}
read_arg = read_arg || (arg == Argument::True);
},
(OpenOption::Write, arg) => {
if write {
span_lint(
cx,
NONSENSICAL_OPEN_OPTIONS,
span,
"the method `write` is called more than once",
);
} else {
write = true;
}
write_arg = write_arg || (arg == Argument::True);
},
fn check_open_options(cx: &LateContext<'_>, settings: &[(OpenOption, Argument, Span)], span: Span) {
// The args passed to these methods, if they have been called
let mut options = FxHashMap::default();
for (option, arg, sp) in settings {
if let Some((_, prev_span)) = options.insert(option.clone(), (arg.clone(), *sp)) {
span_lint(
cx,
NONSENSICAL_OPEN_OPTIONS,
prev_span,
&format!("the method `{}` is called more than once", &option),
);
}
}
if read && truncate && read_arg && truncate_arg && !(write && write_arg) {
if let Some((Argument::Set(true), _)) = options.get(&OpenOption::Read)
&& let Some((Argument::Set(true), _)) = options.get(&OpenOption::Truncate)
&& let None | Some((Argument::Set(false), _)) = options.get(&OpenOption::Write)
{
span_lint(
cx,
NONSENSICAL_OPEN_OPTIONS,
@ -167,7 +167,10 @@ fn check_open_options(cx: &LateContext<'_>, options: &[(OpenOption, Argument)],
"file opened with `truncate` and `read`",
);
}
if append && truncate && append_arg && truncate_arg {
if let Some((Argument::Set(true), _)) = options.get(&OpenOption::Append)
&& let Some((Argument::Set(true), _)) = options.get(&OpenOption::Truncate)
{
span_lint(
cx,
NONSENSICAL_OPEN_OPTIONS,
@ -175,4 +178,29 @@ fn check_open_options(cx: &LateContext<'_>, options: &[(OpenOption, Argument)],
"file opened with `append` and `truncate`",
);
}
if let Some((Argument::Set(true), create_span)) = options.get(&OpenOption::Create)
&& let None = options.get(&OpenOption::Truncate)
&& let None | Some((Argument::Set(false), _)) = options.get(&OpenOption::Append)
{
span_lint_and_then(
cx,
SUSPICIOUS_OPEN_OPTIONS,
*create_span,
"file opened with `create`, but `truncate` behavior not defined",
|diag| {
diag.span_suggestion(
create_span.shrink_to_hi(),
"add",
".truncate(true)".to_string(),
rustc_errors::Applicability::MaybeIncorrect,
)
.help("if you intend to overwrite an existing file entirely, call `.truncate(true)`")
.help(
"if you instead know that you may want to keep some parts of the old file, call `.truncate(false)`",
)
.help("alternatively, use `.append(true)` to append to the file instead of overwriting it");
},
);
}
}

View file

@ -97,7 +97,7 @@ pub(super) fn check(
};
let method_hint = if is_mut { "as_deref_mut" } else { "as_deref" };
let hint = format!("{}.{method_hint}()", snippet(cx, as_ref_recv.span, ".."));
let suggestion = format!("try using {method_hint} instead");
let suggestion = format!("consider using {method_hint}");
let msg = format!("called `{current_method}` on an `Option` value");
span_lint_and_sugg(

View file

@ -33,7 +33,7 @@ pub(super) fn check<'tcx>(
OPTION_MAP_OR_ERR_OK,
expr.span,
msg,
"try using `ok_or` instead",
"consider using `ok_or`",
format!("{self_snippet}.ok_or({err_snippet})"),
Applicability::MachineApplicable,
);

View file

@ -72,7 +72,7 @@ pub(super) fn check<'tcx>(
OPTION_MAP_OR_NONE,
expr.span,
msg,
"try using `map` instead",
"consider using `map`",
format!("{self_snippet}.map({arg_snippet} {func_snippet})"),
Applicability::MachineApplicable,
);
@ -85,7 +85,7 @@ pub(super) fn check<'tcx>(
OPTION_MAP_OR_NONE,
expr.span,
msg,
"try using `and_then` instead",
"consider using `and_then`",
format!("{self_snippet}.and_then({func_snippet})"),
Applicability::MachineApplicable,
);
@ -97,7 +97,7 @@ pub(super) fn check<'tcx>(
RESULT_MAP_OR_INTO_OPTION,
expr.span,
msg,
"try using `ok` instead",
"consider using `ok`",
format!("{self_snippet}.ok()"),
Applicability::MachineApplicable,
);

View file

@ -34,7 +34,7 @@ pub(super) fn check<'tcx>(
RESULT_MAP_OR_INTO_OPTION,
expr.span,
msg,
"try using `ok` instead",
"consider using `ok`",
format!("{self_snippet}.ok()"),
Applicability::MachineApplicable,
);

View file

@ -63,7 +63,7 @@ pub(super) fn check<'tcx>(
SEARCH_IS_SOME,
method_span.with_hi(expr.span.hi()),
&msg,
"use `any()` instead",
"consider using",
format!(
"any({})",
any_search_snippet.as_ref().map_or(&*search_snippet, String::as_str)
@ -77,7 +77,7 @@ pub(super) fn check<'tcx>(
SEARCH_IS_SOME,
expr.span,
&msg,
"use `!_.any()` instead",
"consider using",
format!(
"!{iter}.any({})",
any_search_snippet.as_ref().map_or(&*search_snippet, String::as_str)
@ -118,7 +118,7 @@ pub(super) fn check<'tcx>(
SEARCH_IS_SOME,
method_span.with_hi(expr.span.hi()),
&msg,
"use `contains()` instead",
"consider using",
format!("contains({find_arg})"),
applicability,
);
@ -132,7 +132,7 @@ pub(super) fn check<'tcx>(
SEARCH_IS_SOME,
expr.span,
&msg,
"use `!_.contains()` instead",
"consider using",
format!("!{string}.contains({find_arg})"),
applicability,
);

View file

@ -57,7 +57,7 @@ pub(super) fn check(
SINGLE_CHAR_PATTERN,
arg.span,
"single-character string constant used as pattern",
"try using a `char` instead",
"consider using a `char`",
hint,
applicability,
);

View file

@ -32,7 +32,7 @@ pub(super) fn check<'tcx>(
UNNECESSARY_JOIN,
span.with_hi(expr.span.hi()),
r#"called `.collect::<Vec<String>>().join("")` on an iterator"#,
"try using",
"consider using",
"collect::<String>()".to_owned(),
applicability,
);

View file

@ -203,7 +203,7 @@ pub(super) fn check<'tcx>(
cx,
UNNECESSARY_SORT_BY,
expr.span,
"use Vec::sort_by_key here instead",
"consider using `sort_by_key`",
"try",
format!(
"{}.sort{}_by_key(|{}| {})",
@ -226,7 +226,7 @@ pub(super) fn check<'tcx>(
cx,
UNNECESSARY_SORT_BY,
expr.span,
"use Vec::sort here instead",
"consider using `sort`",
"try",
format!(
"{}.sort{}()",

View file

@ -1,7 +1,9 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::walk_ptrs_ty_depth;
use clippy_utils::{get_parent_expr, is_diag_trait_item, match_def_path, paths, peel_blocks};
use clippy_utils::{
get_parent_expr, is_diag_trait_item, match_def_path, path_to_local_id, paths, peel_blocks, strip_pat_refs,
};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@ -108,9 +110,12 @@ fn check_qpath(cx: &LateContext<'_>, qpath: hir::QPath<'_>, hir_id: hir::HirId)
fn is_calling_clone(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool {
match arg.kind {
hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
hir::ExprKind::Closure(&hir::Closure { body, .. })
// If it's a closure, we need to check what is called.
let closure_body = cx.tcx.hir().body(body);
if let closure_body = cx.tcx.hir().body(body)
&& let [param] = closure_body.params
&& let hir::PatKind::Binding(_, local_id, ..) = strip_pat_refs(param.pat).kind =>
{
let closure_expr = peel_blocks(closure_body.value);
match closure_expr.kind {
hir::ExprKind::MethodCall(method, obj, [], _) => {
@ -122,14 +127,17 @@ fn is_calling_clone(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool {
// no autoderefs
&& !cx.typeck_results().expr_adjustments(obj).iter()
.any(|a| matches!(a.kind, Adjust::Deref(Some(..))))
&& path_to_local_id(obj, local_id)
{
true
} else {
false
}
},
hir::ExprKind::Call(call, [_]) => {
if let hir::ExprKind::Path(qpath) = call.kind {
hir::ExprKind::Call(call, [recv]) => {
if let hir::ExprKind::Path(qpath) = call.kind
&& path_to_local_id(recv, local_id)
{
check_qpath(cx, qpath, call.hir_id)
} else {
false

View file

@ -5,16 +5,15 @@
use clippy_utils::{get_parent_node, inherits_cfg, is_from_proc_macro, is_self};
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_qpath, FnKind, Visitor};
use rustc_hir::intravisit::FnKind;
use rustc_hir::{
BlockCheckMode, Body, Closure, Expr, ExprKind, FnDecl, HirId, HirIdMap, HirIdSet, Impl, ItemKind, Mutability, Node,
PatKind, QPath,
PatKind,
};
use rustc_hir_typeck::expr_use_visitor as euv;
use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::map::associated_body;
use rustc_middle::hir::nested_filter::OnlyBodies;
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::{self, Ty, TyCtxt, UpvarId, UpvarPath};
use rustc_session::impl_lint_pass;
@ -234,12 +233,29 @@ fn check_fn(
}
}
fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
cx.tcx.hir().visit_all_item_likes_in_crate(&mut FnNeedsMutVisitor {
cx,
used_fn_def_ids: &mut self.used_fn_def_ids,
});
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
// #11182; do not lint if mutability is required elsewhere
if let ExprKind::Path(..) = expr.kind
&& let Some(parent) = get_parent_node(cx.tcx, expr.hir_id)
&& let ty::FnDef(def_id, _) = cx.typeck_results().expr_ty(expr).kind()
&& let Some(def_id) = def_id.as_local()
{
if let Node::Expr(e) = parent
&& let ExprKind::Call(call, _) = e.kind
&& call.hir_id == expr.hir_id
{
return;
}
// We don't need to check each argument individually as you cannot coerce a function
// taking `&mut` -> `&`, for some reason, so if we've gotten this far we know it's
// passed as a `fn`-like argument (or is unified) and should ignore every "unused"
// argument entirely
self.used_fn_def_ids.insert(def_id);
}
}
fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
for (fn_def_id, unused) in self
.fn_def_ids_to_maybe_unused_mut
.iter()
@ -501,48 +517,3 @@ fn bind(&mut self, cmt: &euv::PlaceWithHirId<'tcx>, id: HirId) {
}
}
}
/// A final pass to check for paths referencing this function that require the argument to be
/// `&mut`, basically if the function is ever used as a `fn`-like argument.
struct FnNeedsMutVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
used_fn_def_ids: &'a mut FxHashSet<LocalDefId>,
}
impl<'tcx> Visitor<'tcx> for FnNeedsMutVisitor<'_, 'tcx> {
type NestedFilter = OnlyBodies;
fn nested_visit_map(&mut self) -> Self::Map {
self.cx.tcx.hir()
}
fn visit_qpath(&mut self, qpath: &'tcx QPath<'tcx>, hir_id: HirId, _: Span) {
walk_qpath(self, qpath, hir_id);
let Self { cx, used_fn_def_ids } = self;
// #11182; do not lint if mutability is required elsewhere
if let Node::Expr(expr) = cx.tcx.hir_node(hir_id)
&& let Some(parent) = get_parent_node(cx.tcx, expr.hir_id)
&& let ty::FnDef(def_id, _) = cx
.tcx
.typeck(cx.tcx.hir().enclosing_body_owner(hir_id))
.expr_ty(expr)
.kind()
&& let Some(def_id) = def_id.as_local()
{
if let Node::Expr(e) = parent
&& let ExprKind::Call(call, _) = e.kind
&& call.hir_id == expr.hir_id
{
return;
}
// We don't need to check each argument individually as you cannot coerce a function
// taking `&mut` -> `&`, for some reason, so if we've gotten this far we know it's
// passed as a `fn`-like argument (or is unified) and should ignore every "unused"
// argument entirely
used_fn_def_ids.insert(def_id);
}
}
}
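
The visitor removed above is replaced by the `check_expr` hook earlier in this file: any path to a local function that appears somewhere other than as the callee of a call marks that function as "mutability required", so its `&mut` parameters are never reported. A hypothetical illustration of the #11182 shape this protects:

```rust
// Hypothetical illustration: once a function is used as a value of type
// `fn(&mut _)`, its `&mut` parameter cannot be relaxed to `&`, so
// NEEDLESS_PASS_BY_REF_MUT must stay quiet for it.
fn report(value: &mut u32) {
    // Does not actually need `&mut`...
    println!("{value}");
}

fn apply(f: fn(&mut u32), value: &mut u32) {
    f(value);
}

fn main() {
    let mut n = 0;
    // ...but `report` is passed here as a `fn(&mut u32)` value rather than
    // called directly, so `check_expr` records it as requiring `&mut`.
    apply(report, &mut n);
}
```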

View file

@ -1,16 +1,18 @@
use clippy_utils::diagnostics::{span_lint_hir, span_lint_hir_and_then};
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::has_drop;
use clippy_utils::{get_parent_node, is_lint_allowed, peel_blocks};
use clippy_utils::{any_parent_is_automatically_derived, get_parent_node, is_lint_allowed, path_to_local, peel_blocks};
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::{
is_range_literal, BinOpKind, BlockCheckMode, Expr, ExprKind, ItemKind, Node, PatKind, Stmt, StmtKind, UnsafeSource,
is_range_literal, BinOpKind, BlockCheckMode, Expr, ExprKind, HirId, HirIdMap, ItemKind, Node, PatKind, Stmt,
StmtKind, UnsafeSource,
};
use rustc_infer::infer::TyCtxtInferExt as _;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::declare_lint_pass;
use rustc_session::impl_lint_pass;
use rustc_span::Span;
use std::ops::Deref;
declare_clippy_lint! {
@ -74,94 +76,125 @@
"outer expressions with no effect"
}
declare_lint_pass!(NoEffect => [NO_EFFECT, UNNECESSARY_OPERATION, NO_EFFECT_UNDERSCORE_BINDING]);
#[derive(Default)]
pub struct NoEffect {
underscore_bindings: HirIdMap<Span>,
local_bindings: Vec<Vec<HirId>>,
}
impl_lint_pass!(NoEffect => [NO_EFFECT, UNNECESSARY_OPERATION, NO_EFFECT_UNDERSCORE_BINDING]);
impl<'tcx> LateLintPass<'tcx> for NoEffect {
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
if check_no_effect(cx, stmt) {
if self.check_no_effect(cx, stmt) {
return;
}
check_unnecessary_operation(cx, stmt);
}
}
fn check_no_effect(cx: &LateContext<'_>, stmt: &Stmt<'_>) -> bool {
if let StmtKind::Semi(expr) = stmt.kind {
// move `expr.span.from_expansion()` ahead
if expr.span.from_expansion() {
return false;
}
let expr = peel_blocks(expr);
fn check_block(&mut self, _: &LateContext<'tcx>, _: &'tcx rustc_hir::Block<'tcx>) {
self.local_bindings.push(Vec::default());
}
if is_operator_overridden(cx, expr) {
// Return `true`, to prevent `check_unnecessary_operation` from
// linting on this statement as well.
return true;
}
if has_no_effect(cx, expr) {
span_lint_hir_and_then(
cx,
NO_EFFECT,
expr.hir_id,
stmt.span,
"statement with no effect",
|diag| {
for parent in cx.tcx.hir().parent_iter(stmt.hir_id) {
if let Node::Item(item) = parent.1
&& let ItemKind::Fn(..) = item.kind
&& let Some(Node::Block(block)) = get_parent_node(cx.tcx, stmt.hir_id)
&& let [.., final_stmt] = block.stmts
&& final_stmt.hir_id == stmt.hir_id
{
let expr_ty = cx.typeck_results().expr_ty(expr);
let mut ret_ty = cx
.tcx
.fn_sig(item.owner_id)
.instantiate_identity()
.output()
.skip_binder();
// Remove `impl Future<Output = T>` to get `T`
if cx.tcx.ty_is_opaque_future(ret_ty)
&& let Some(true_ret_ty) = cx.tcx.infer_ctxt().build().get_impl_future_output_ty(ret_ty)
{
ret_ty = true_ret_ty;
}
if !ret_ty.is_unit() && ret_ty == expr_ty {
diag.span_suggestion(
stmt.span.shrink_to_lo(),
"did you mean to return it?",
"return ",
Applicability::MaybeIncorrect,
);
}
}
}
},
);
return true;
}
} else if let StmtKind::Local(local) = stmt.kind {
if !is_lint_allowed(cx, NO_EFFECT_UNDERSCORE_BINDING, local.hir_id)
&& let Some(init) = local.init
&& local.els.is_none()
&& !local.pat.span.from_expansion()
&& has_no_effect(cx, init)
&& let PatKind::Binding(_, _, ident, _) = local.pat.kind
&& ident.name.to_ident_string().starts_with('_')
{
span_lint_hir(
cx,
NO_EFFECT_UNDERSCORE_BINDING,
init.hir_id,
stmt.span,
"binding to `_` prefixed variable with no side-effect",
);
return true;
fn check_block_post(&mut self, cx: &LateContext<'tcx>, _: &'tcx rustc_hir::Block<'tcx>) {
for hir_id in self.local_bindings.pop().unwrap() {
if let Some(span) = self.underscore_bindings.remove(&hir_id) {
span_lint_hir(
cx,
NO_EFFECT_UNDERSCORE_BINDING,
hir_id,
span,
"binding to `_` prefixed variable with no side-effect",
);
}
}
}
false
fn check_expr(&mut self, _: &LateContext<'tcx>, expr: &'tcx rustc_hir::Expr<'tcx>) {
if let Some(def_id) = path_to_local(expr) {
self.underscore_bindings.remove(&def_id);
}
}
}
impl NoEffect {
fn check_no_effect(&mut self, cx: &LateContext<'_>, stmt: &Stmt<'_>) -> bool {
if let StmtKind::Semi(expr) = stmt.kind {
// move `expr.span.from_expansion()` ahead
if expr.span.from_expansion() {
return false;
}
let expr = peel_blocks(expr);
if is_operator_overridden(cx, expr) {
// Return `true`, to prevent `check_unnecessary_operation` from
// linting on this statement as well.
return true;
}
if has_no_effect(cx, expr) {
span_lint_hir_and_then(
cx,
NO_EFFECT,
expr.hir_id,
stmt.span,
"statement with no effect",
|diag| {
for parent in cx.tcx.hir().parent_iter(stmt.hir_id) {
if let Node::Item(item) = parent.1
&& let ItemKind::Fn(..) = item.kind
&& let Some(Node::Block(block)) = get_parent_node(cx.tcx, stmt.hir_id)
&& let [.., final_stmt] = block.stmts
&& final_stmt.hir_id == stmt.hir_id
{
let expr_ty = cx.typeck_results().expr_ty(expr);
let mut ret_ty = cx
.tcx
.fn_sig(item.owner_id)
.instantiate_identity()
.output()
.skip_binder();
// Remove `impl Future<Output = T>` to get `T`
if cx.tcx.ty_is_opaque_future(ret_ty)
&& let Some(true_ret_ty) =
cx.tcx.infer_ctxt().build().get_impl_future_output_ty(ret_ty)
{
ret_ty = true_ret_ty;
}
if !ret_ty.is_unit() && ret_ty == expr_ty {
diag.span_suggestion(
stmt.span.shrink_to_lo(),
"did you mean to return it?",
"return ",
Applicability::MaybeIncorrect,
);
}
}
}
},
);
return true;
}
} else if let StmtKind::Local(local) = stmt.kind {
if !is_lint_allowed(cx, NO_EFFECT_UNDERSCORE_BINDING, local.hir_id)
&& let Some(init) = local.init
&& local.els.is_none()
&& !local.pat.span.from_expansion()
&& has_no_effect(cx, init)
&& let PatKind::Binding(_, hir_id, ident, _) = local.pat.kind
&& ident.name.to_ident_string().starts_with('_')
&& !any_parent_is_automatically_derived(cx.tcx, local.hir_id)
{
if let Some(l) = self.local_bindings.last_mut() {
l.push(hir_id);
self.underscore_bindings.insert(hir_id, ident.span);
}
return true;
}
}
false
}
}
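
With this rewrite, `_`-prefixed bindings are collected per block and dropped again as soon as the binding is read; only the survivors are linted in `check_block_post`. A hypothetical illustration of the resulting NO_EFFECT_UNDERSCORE_BINDING behaviour (not the lint's test fixtures):

```rust
// Hypothetical illustration of the per-block underscore-binding tracking.
fn main() {
    let x = 5;

    // Still linted at the end of the block: bound to a side-effect-free
    // expression and never read afterwards.
    let _unused = x + 1;

    // No longer linted: the later read removes the binding from the
    // per-block map before `check_block_post` runs.
    let _used = x + 1;
    println!("{}", _used);
}
```
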
fn is_operator_overridden(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {

View file

@ -1,13 +1,12 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_opt;
use clippy_utils::std_or_core;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::LateContext;
use super::PTR_EQ;
static LINT_MSG: &str = "use `std::ptr::eq` when comparing raw pointers";
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
expr: &'tcx Expr<'_>,
@ -26,13 +25,14 @@ pub(super) fn check<'tcx>(
&& let Some(left_snip) = snippet_opt(cx, left_var.span)
&& let Some(right_snip) = snippet_opt(cx, right_var.span)
{
let Some(top_crate) = std_or_core(cx) else { return };
span_lint_and_sugg(
cx,
PTR_EQ,
expr.span,
LINT_MSG,
&format!("use `{top_crate}::ptr::eq` when comparing raw pointers"),
"try",
format!("std::ptr::eq({left_snip}, {right_snip})"),
format!("{top_crate}::ptr::eq({left_snip}, {right_snip})"),
Applicability::MachineApplicable,
);
}

View file

@ -54,7 +54,7 @@ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
};
let is_visible = |field: &FieldDef<'_>| match self.behavior {
PubUnderscoreFieldsBehaviour::PublicallyExported => cx.effective_visibilities.is_reachable(field.def_id),
PubUnderscoreFieldsBehaviour::PubliclyExported => cx.effective_visibilities.is_reachable(field.def_id),
PubUnderscoreFieldsBehaviour::AllPubFields => {
// If there is a visibility span then the field is marked pub in some way.
!field.vis_span.is_empty()

View file

@ -1,11 +1,13 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
use clippy_utils::get_enclosing_block;
use clippy_utils::higher::{get_vec_init_kind, VecInitKind};
use clippy_utils::source::snippet;
use clippy_utils::visitors::for_each_expr;
use core::ops::ControlFlow;
use hir::{Expr, ExprKind, Local, PatKind, PathSegment, QPath, StmtKind};
use hir::{Expr, ExprKind, HirId, Local, PatKind, PathSegment, QPath, StmtKind};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::intravisit::{walk_expr, Visitor};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::declare_lint_pass;
@ -49,57 +51,40 @@
impl<'tcx> LateLintPass<'tcx> for ReadZeroByteVec {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &hir::Block<'tcx>) {
for (idx, stmt) in block.stmts.iter().enumerate() {
if !stmt.span.from_expansion()
// matches `let v = Vec::new();`
&& let StmtKind::Local(local) = stmt.kind
&& let Local { pat, init: Some(init), .. } = local
&& let PatKind::Binding(_, _, ident, _) = pat.kind
for stmt in block.stmts {
if stmt.span.from_expansion() {
return;
}
if let StmtKind::Local(local) = stmt.kind
&& let Local {
pat, init: Some(init), ..
} = local
&& let PatKind::Binding(_, id, ident, _) = pat.kind
&& let Some(vec_init_kind) = get_vec_init_kind(cx, init)
{
let visitor = |expr: &Expr<'_>| {
if let ExprKind::MethodCall(path, _, [arg], _) = expr.kind
&& let PathSegment {
ident: read_or_read_exact,
..
} = *path
&& matches!(read_or_read_exact.as_str(), "read" | "read_exact")
&& let ExprKind::AddrOf(_, hir::Mutability::Mut, inner) = arg.kind
&& let ExprKind::Path(QPath::Resolved(None, inner_path)) = inner.kind
&& let [inner_seg] = inner_path.segments
&& ident.name == inner_seg.ident.name
{
ControlFlow::Break(())
} else {
ControlFlow::Continue(())
}
let mut visitor = ReadVecVisitor {
local_id: id,
read_zero_expr: None,
has_resize: false,
};
let (read_found, next_stmt_span) = if let Some(next_stmt) = block.stmts.get(idx + 1) {
// case { .. stmt; stmt; .. }
(for_each_expr(next_stmt, visitor).is_some(), next_stmt.span)
} else if let Some(e) = block.expr {
// case { .. stmt; expr }
(for_each_expr(e, visitor).is_some(), e.span)
} else {
let Some(enclosing_block) = get_enclosing_block(cx, id) else {
return;
};
visitor.visit_block(enclosing_block);
if read_found && !next_stmt_span.from_expansion() {
if let Some(expr) = visitor.read_zero_expr {
let applicability = Applicability::MaybeIncorrect;
match vec_init_kind {
VecInitKind::WithConstCapacity(len) => {
span_lint_and_sugg(
cx,
READ_ZERO_BYTE_VEC,
next_stmt_span,
expr.span,
"reading zero byte data to `Vec`",
"try",
format!(
"{}.resize({len}, 0); {}",
ident.as_str(),
snippet(cx, next_stmt_span, "..")
),
format!("{}.resize({len}, 0); {}", ident.as_str(), snippet(cx, expr.span, "..")),
applicability,
);
},
@ -108,25 +93,20 @@ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &hir::Block<'tcx>) {
span_lint_and_sugg(
cx,
READ_ZERO_BYTE_VEC,
next_stmt_span,
expr.span,
"reading zero byte data to `Vec`",
"try",
format!(
"{}.resize({}, 0); {}",
ident.as_str(),
snippet(cx, e.span, ".."),
snippet(cx, next_stmt_span, "..")
snippet(cx, expr.span, "..")
),
applicability,
);
},
_ => {
span_lint(
cx,
READ_ZERO_BYTE_VEC,
next_stmt_span,
"reading zero byte data to `Vec`",
);
span_lint(cx, READ_ZERO_BYTE_VEC, expr.span, "reading zero byte data to `Vec`");
},
}
}
@ -134,3 +114,47 @@ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &hir::Block<'tcx>) {
}
}
}
struct ReadVecVisitor<'tcx> {
local_id: HirId,
read_zero_expr: Option<&'tcx Expr<'tcx>>,
has_resize: bool,
}
impl<'tcx> Visitor<'tcx> for ReadVecVisitor<'tcx> {
fn visit_expr(&mut self, e: &'tcx Expr<'tcx>) {
if let ExprKind::MethodCall(path, receiver, args, _) = e.kind {
let PathSegment { ident, .. } = *path;
match ident.as_str() {
"read" | "read_exact" => {
let [arg] = args else { return };
if let ExprKind::AddrOf(_, hir::Mutability::Mut, inner) = arg.kind
&& let ExprKind::Path(QPath::Resolved(None, inner_path)) = inner.kind
&& let [inner_seg] = inner_path.segments
&& let Res::Local(res_id) = inner_seg.res
&& self.local_id == res_id
{
self.read_zero_expr = Some(e);
return;
}
},
"resize" => {
// If the Vec is resized, then it's a valid read
if let ExprKind::Path(QPath::Resolved(_, inner_path)) = receiver.kind
&& let Res::Local(res_id) = inner_path.res
&& self.local_id == res_id
{
self.has_resize = true;
return;
}
},
_ => {},
}
}
if !self.has_resize && self.read_zero_expr.is_none() {
walk_expr(self, e);
}
}
}
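
The old "next statement only" scan is replaced by `ReadVecVisitor`, which walks the enclosing block and bails out once it sees a `resize` on the same local. An illustrative pair of cases (hypothetical snippet, assuming a file path that exists):

```rust
// Illustrative only: reading into a Vec that was never resized reads zero
// bytes, which is what READ_ZERO_BYTE_VEC flags.
use std::fs::File;
use std::io::Read;

fn main() -> std::io::Result<()> {
    let mut f = File::open("some-input")?;

    let mut buf: Vec<u8> = Vec::with_capacity(1024);
    // `buf.len()` is still 0 here, so this reads nothing; the suggestion is
    // to insert `buf.resize(1024, 0);` first.
    let _n = f.read(&mut buf)?;

    // With the visitor, a resize anywhere in the enclosing block is now
    // recognised and the lint stays quiet:
    let mut ok_buf: Vec<u8> = Vec::with_capacity(1024);
    ok_buf.resize(1024, 0);
    let _m = f.read(&mut ok_buf)?;
    Ok(())
}
```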

View file

@ -5,6 +5,7 @@
use rustc_hir::{Block, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::declare_lint_pass;
use rustc_span::{ExpnKind, MacroKind, Span};
declare_clippy_lint! {
/// ### What it does
@ -39,6 +40,7 @@ impl<'tcx> LateLintPass<'tcx> for SemicolonIfNothingReturned {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'tcx>) {
if !block.span.from_expansion()
&& let Some(expr) = block.expr
&& !from_attr_macro(expr.span)
&& let t_expr = cx.typeck_results().expr_ty(expr)
&& t_expr.is_unit()
&& let mut app = Applicability::MachineApplicable
@ -63,3 +65,7 @@ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'tcx>) {
}
}
}
fn from_attr_macro(span: Span) -> bool {
matches!(span.ctxt().outer_expn_data().kind, ExpnKind::Macro(MacroKind::Attr, _))
}

View file

@ -1,4 +1,4 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::{is_from_proc_macro, is_in_test_function};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::LocalDefId;
@ -88,16 +88,18 @@ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
};
cx.tcx.hir().visit_all_item_likes_in_crate(&mut v);
for usage in self.def_id_to_usage.values() {
for (&def_id, usage) in &self.def_id_to_usage {
let single_call_fn_span = usage.0;
if let [caller_span] = *usage.1 {
span_lint_and_help(
span_lint_hir_and_then(
cx,
SINGLE_CALL_FN,
cx.tcx.local_def_id_to_hir_id(def_id),
single_call_fn_span,
"this function is only used once",
Some(caller_span),
"used here",
|diag| {
diag.span_help(caller_span, "used here");
},
);
}
}

View file

@ -390,6 +390,14 @@ fn get_trait_info_from_bound<'a>(bound: &'a GenericBound<'_>) -> Option<(Res, &'
}
}
fn get_ty_res(ty: Ty<'_>) -> Option<Res> {
match ty.kind {
TyKind::Path(QPath::Resolved(_, path)) => Some(path.res),
TyKind::Path(QPath::TypeRelative(ty, _)) => get_ty_res(*ty),
_ => None,
}
}
// FIXME: ComparableTraitRef does not support nested bounds needed for associated_type_bounds
fn into_comparable_trait_ref(trait_ref: &TraitRef<'_>) -> ComparableTraitRef {
ComparableTraitRef(
@ -401,10 +409,8 @@ fn into_comparable_trait_ref(trait_ref: &TraitRef<'_>) -> ComparableTraitRef {
.filter_map(|segment| {
// get trait bound type arguments
Some(segment.args?.args.iter().filter_map(|arg| {
if let GenericArg::Type(ty) = arg
&& let TyKind::Path(QPath::Resolved(_, path)) = ty.kind
{
return Some(path.res);
if let GenericArg::Type(ty) = arg {
return get_ty_res(**ty);
}
None
}))

View file

@ -1,6 +1,6 @@
use super::TRANSMUTE_INT_TO_CHAR;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::sugg;
use clippy_utils::{std_or_core, sugg};
use rustc_ast as ast;
use rustc_errors::Applicability;
use rustc_hir::Expr;
@ -25,6 +25,7 @@ pub(super) fn check<'tcx>(
e.span,
&format!("transmute from a `{from_ty}` to a `char`"),
|diag| {
let Some(top_crate) = std_or_core(cx) else { return };
let arg = sugg::Sugg::hir(cx, arg, "..");
let arg = if let ty::Int(_) = from_ty.kind() {
arg.as_ty(ast::UintTy::U32.name_str())
@ -34,7 +35,7 @@ pub(super) fn check<'tcx>(
diag.span_suggestion(
e.span,
"consider using",
format!("std::char::from_u32({arg}).unwrap()"),
format!("{top_crate}::char::from_u32({arg}).unwrap()"),
Applicability::Unspecified,
);
},

View file

@ -1,7 +1,7 @@
use super::{TRANSMUTE_BYTES_TO_STR, TRANSMUTE_PTR_TO_PTR};
use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
use clippy_utils::source::snippet;
use clippy_utils::sugg;
use clippy_utils::{std_or_core, sugg};
use rustc_errors::Applicability;
use rustc_hir::{Expr, Mutability};
use rustc_lint::LateContext;
@ -25,6 +25,8 @@ pub(super) fn check<'tcx>(
&& let ty::Uint(ty::UintTy::U8) = slice_ty.kind()
&& from_mutbl == to_mutbl
{
let Some(top_crate) = std_or_core(cx) else { return true };
let postfix = if *from_mutbl == Mutability::Mut { "_mut" } else { "" };
let snippet = snippet(cx, arg.span, "..");
@ -36,9 +38,9 @@ pub(super) fn check<'tcx>(
&format!("transmute from a `{from_ty}` to a `{to_ty}`"),
"consider using",
if const_context {
format!("std::str::from_utf8_unchecked{postfix}({snippet})")
format!("{top_crate}::str::from_utf8_unchecked{postfix}({snippet})")
} else {
format!("std::str::from_utf8{postfix}({snippet}).unwrap()")
format!("{top_crate}::str::from_utf8{postfix}({snippet}).unwrap()")
},
Applicability::MaybeIncorrect,
);

View file

@ -167,7 +167,15 @@ fn check_partial_eq(cx: &LateContext<'_>, method_span: Span, method_def_id: Loca
false
}
},
ExprKind::MethodCall(segment, _receiver, &[_arg], _) if segment.ident.name == name.name => {
ExprKind::MethodCall(segment, receiver, &[_arg], _) if segment.ident.name == name.name => {
if let Some(ty) = cx.typeck_results().expr_ty_opt(receiver)
&& let Some(ty_id) = get_ty_def_id(ty)
&& self_arg != ty_id
{
// Since this called on a different type, the lint should not be
// triggered here.
return;
}
if let Some(fn_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& let Some(trait_id) = cx.tcx.trait_of_item(fn_id)
&& trait_id == trait_def_id
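A hypothetical example (not taken from the diff) of the false positive this guard avoids: the inner `eq` call is made on a field of a different type, so it cannot recurse back into `Wrapper::eq`.
struct Wrapper {
    inner: String,
}
impl PartialEq for Wrapper {
    fn eq(&self, other: &Self) -> bool {
        // `String::eq`, not `Wrapper::eq`: the receiver type differs from
        // `Self`, so with the added receiver-type check the lint stays quiet.
        self.inner.eq(&other.inner)
    }
}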

View file

@ -1,9 +1,10 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
use clippy_utils::{is_trait_method, is_try, match_trait_method, paths};
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::{is_res_lang_ctor, is_trait_method, match_trait_method, paths};
use hir::{ExprKind, PatKind};
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::declare_lint_pass;
use rustc_span::sym;
use rustc_span::{sym, Span};
declare_clippy_lint! {
/// ### What it does
@ -45,126 +46,219 @@
declare_lint_pass!(UnusedIoAmount => [UNUSED_IO_AMOUNT]);
impl<'tcx> LateLintPass<'tcx> for UnusedIoAmount {
fn check_stmt(&mut self, cx: &LateContext<'_>, s: &hir::Stmt<'_>) {
let (hir::StmtKind::Semi(expr) | hir::StmtKind::Expr(expr)) = s.kind else {
return;
};
#[derive(Copy, Clone)]
enum IoOp {
AsyncWrite(bool),
AsyncRead(bool),
SyncRead(bool),
SyncWrite(bool),
}
match expr.kind {
hir::ExprKind::Match(res, _, _) if is_try(cx, expr).is_some() => {
if let hir::ExprKind::Call(func, [ref arg_0, ..]) = res.kind {
if matches!(
func.kind,
hir::ExprKind::Path(hir::QPath::LangItem(hir::LangItem::TryTraitBranch, ..))
) {
check_map_error(cx, arg_0, expr);
}
} else {
check_map_error(cx, res, expr);
}
},
hir::ExprKind::MethodCall(path, arg_0, ..) => match path.ident.as_str() {
"expect" | "unwrap" | "unwrap_or" | "unwrap_or_else" | "is_ok" | "is_err" => {
check_map_error(cx, arg_0, expr);
},
_ => (),
},
_ => (),
impl<'tcx> LateLintPass<'tcx> for UnusedIoAmount {
/// We perform the check at the block level.
/// To catch match and if expressions that act as the return value of a block,
/// we have to look at them in `check_expr` or `check_block`, since they are not stmts;
/// but `check_expr` lacks the broader context we need: the check should apply only
/// to the final expression of the block, and not to `StmtKind::Local`,
/// which binds the value => the io amount is used.
///
/// To check for unused io amount in stmts, we only consider `StmtKind::Semi`.
/// `StmtKind::Local` is not considered because it binds values => the io amount is used.
/// `StmtKind::Expr` is not considered because it requires a unit type => the io amount is used.
/// `StmtKind::Item` is not considered because it's not an expression.
///
/// We then check the individual expressions via `check_expr`. We use the same logic for
/// semi expressions and the final expression as we need to check match and if expressions
/// for binding of the io amount to `Ok(_)`.
///
/// We explicitly check that the match source is `Normal`, because normal matches need
/// special logic to consider their arms, and we want to avoid breaking the logic for
/// situations where things get desugared to a match.
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'tcx>) {
for stmt in block.stmts {
if let hir::StmtKind::Semi(exp) = stmt.kind {
check_expr(cx, exp);
}
}
if let Some(exp) = block.expr
&& matches!(exp.kind, hir::ExprKind::If(_, _, _) | hir::ExprKind::Match(_, _, _))
{
check_expr(cx, exp);
}
}
}
fn check_expr<'a>(cx: &LateContext<'a>, expr: &'a hir::Expr<'a>) {
match expr.kind {
hir::ExprKind::If(cond, _, _)
if let ExprKind::Let(hir::Let { pat, init, .. }) = cond.kind
&& pattern_is_ignored_ok(cx, pat)
&& let Some(op) = should_lint(cx, init) =>
{
emit_lint(cx, cond.span, op, &[pat.span]);
},
hir::ExprKind::Match(expr, arms, hir::MatchSource::Normal) if let Some(op) = should_lint(cx, expr) => {
let found_arms: Vec<_> = arms
.iter()
.filter_map(|arm| {
if pattern_is_ignored_ok(cx, arm.pat) {
Some(arm.span)
} else {
None
}
})
.collect();
if !found_arms.is_empty() {
emit_lint(cx, expr.span, op, found_arms.as_slice());
}
},
_ if let Some(op) = should_lint(cx, expr) => {
emit_lint(cx, expr.span, op, &[]);
},
_ => {},
};
}
fn should_lint<'a>(cx: &LateContext<'a>, mut inner: &'a hir::Expr<'a>) -> Option<IoOp> {
inner = unpack_match(inner);
inner = unpack_try(inner);
inner = unpack_call_chain(inner);
inner = unpack_await(inner);
// we type-check it to get whether it's a read/write or their vectorized forms
// and keep only the ones that produce an io amount
check_io_mode(cx, inner)
}
fn pattern_is_ignored_ok(cx: &LateContext<'_>, pat: &hir::Pat<'_>) -> bool {
// the `if` checks whether we are in a `Result::Ok` pattern
// and the return checks whether it is unhandled
if let PatKind::TupleStruct(ref path, inner_pat, ddp) = pat.kind
// we check against Result::Ok to avoid linting on Err(_) or something else.
&& is_res_lang_ctor(cx, cx.qpath_res(path, pat.hir_id), hir::LangItem::ResultOk)
{
return match (inner_pat, ddp.as_opt_usize()) {
// Ok(_) pattern
([inner_pat], None) if matches!(inner_pat.kind, PatKind::Wild) => true,
// Ok(..) pattern
([], Some(0)) => true,
_ => false,
};
}
false
}
fn unpack_call_chain<'a>(mut expr: &'a hir::Expr<'a>) -> &'a hir::Expr<'a> {
while let hir::ExprKind::MethodCall(path, receiver, ..) = expr.kind {
if matches!(
path.ident.as_str(),
"unwrap" | "expect" | "unwrap_or" | "unwrap_or_else" | "ok" | "is_ok" | "is_err" | "or_else" | "or"
) {
expr = receiver;
} else {
break;
}
}
expr
}
fn unpack_try<'a>(mut expr: &'a hir::Expr<'a>) -> &'a hir::Expr<'a> {
while let hir::ExprKind::Call(func, [ref arg_0, ..]) = expr.kind
&& matches!(
func.kind,
hir::ExprKind::Path(hir::QPath::LangItem(hir::LangItem::TryTraitBranch, ..))
)
{
expr = arg_0;
}
expr
}
fn unpack_match<'a>(mut expr: &'a hir::Expr<'a>) -> &'a hir::Expr<'a> {
while let hir::ExprKind::Match(res, _, _) = expr.kind {
expr = res;
}
expr
}
/// If `expr` is an (e).await, return the inner expression "e" that's being
/// waited on. Otherwise return None.
fn try_remove_await<'a>(expr: &'a hir::Expr<'a>) -> Option<&hir::Expr<'a>> {
fn unpack_await<'a>(expr: &'a hir::Expr<'a>) -> &hir::Expr<'a> {
if let hir::ExprKind::Match(expr, _, hir::MatchSource::AwaitDesugar) = expr.kind {
if let hir::ExprKind::Call(func, [ref arg_0, ..]) = expr.kind {
if matches!(
func.kind,
hir::ExprKind::Path(hir::QPath::LangItem(hir::LangItem::IntoFutureIntoFuture, ..))
) {
return Some(arg_0);
return arg_0;
}
}
}
None
expr
}
fn check_map_error(cx: &LateContext<'_>, call: &hir::Expr<'_>, expr: &hir::Expr<'_>) {
let mut call = call;
while let hir::ExprKind::MethodCall(path, receiver, ..) = call.kind {
if matches!(path.ident.as_str(), "or" | "or_else" | "ok") {
call = receiver;
} else {
break;
/// Check whether the current expr is a function call for an IO operation
fn check_io_mode(cx: &LateContext<'_>, call: &hir::Expr<'_>) -> Option<IoOp> {
let hir::ExprKind::MethodCall(path, ..) = call.kind else {
return None;
};
let vectorized = match path.ident.as_str() {
"write_vectored" | "read_vectored" => true,
"write" | "read" => false,
_ => {
return None;
},
};
match (
is_trait_method(cx, call, sym::IoRead),
is_trait_method(cx, call, sym::IoWrite),
match_trait_method(cx, call, &paths::FUTURES_IO_ASYNCREADEXT)
|| match_trait_method(cx, call, &paths::TOKIO_IO_ASYNCREADEXT),
match_trait_method(cx, call, &paths::TOKIO_IO_ASYNCWRITEEXT)
|| match_trait_method(cx, call, &paths::FUTURES_IO_ASYNCWRITEEXT),
) {
(true, _, _, _) => Some(IoOp::SyncRead(vectorized)),
(_, true, _, _) => Some(IoOp::SyncWrite(vectorized)),
(_, _, true, _) => Some(IoOp::AsyncRead(vectorized)),
(_, _, _, true) => Some(IoOp::AsyncWrite(vectorized)),
_ => None,
}
}
fn emit_lint(cx: &LateContext<'_>, span: Span, op: IoOp, wild_cards: &[Span]) {
let (msg, help) = match op {
IoOp::AsyncRead(false) => (
"read amount is not handled",
Some("use `AsyncReadExt::read_exact` instead, or handle partial reads"),
),
IoOp::SyncRead(false) => (
"read amount is not handled",
Some("use `Read::read_exact` instead, or handle partial reads"),
),
IoOp::SyncWrite(false) => (
"written amount is not handled",
Some("use `Write::write_all` instead, or handle partial writes"),
),
IoOp::AsyncWrite(false) => (
"written amount is not handled",
Some("use `AsyncWriteExt::write_all` instead, or handle partial writes"),
),
IoOp::SyncRead(true) | IoOp::AsyncRead(true) => ("read amount is not handled", None),
IoOp::SyncWrite(true) | IoOp::AsyncWrite(true) => ("written amount is not handled", None),
};
span_lint_and_then(cx, UNUSED_IO_AMOUNT, span, msg, |diag| {
if let Some(help_str) = help {
diag.help(help_str);
}
}
if let Some(call) = try_remove_await(call) {
check_method_call(cx, call, expr, true);
} else {
check_method_call(cx, call, expr, false);
}
}
fn check_method_call(cx: &LateContext<'_>, call: &hir::Expr<'_>, expr: &hir::Expr<'_>, is_await: bool) {
if let hir::ExprKind::MethodCall(path, ..) = call.kind {
let symbol = path.ident.as_str();
let read_trait = if is_await {
match_trait_method(cx, call, &paths::FUTURES_IO_ASYNCREADEXT)
|| match_trait_method(cx, call, &paths::TOKIO_IO_ASYNCREADEXT)
} else {
is_trait_method(cx, call, sym::IoRead)
};
let write_trait = if is_await {
match_trait_method(cx, call, &paths::FUTURES_IO_ASYNCWRITEEXT)
|| match_trait_method(cx, call, &paths::TOKIO_IO_ASYNCWRITEEXT)
} else {
is_trait_method(cx, call, sym::IoWrite)
};
match (read_trait, write_trait, symbol, is_await) {
(true, _, "read", false) => span_lint_and_help(
cx,
UNUSED_IO_AMOUNT,
expr.span,
"read amount is not handled",
None,
"use `Read::read_exact` instead, or handle partial reads",
),
(true, _, "read", true) => span_lint_and_help(
cx,
UNUSED_IO_AMOUNT,
expr.span,
"read amount is not handled",
None,
"use `AsyncReadExt::read_exact` instead, or handle partial reads",
),
(true, _, "read_vectored", _) => {
span_lint(cx, UNUSED_IO_AMOUNT, expr.span, "read amount is not handled");
},
(_, true, "write", false) => span_lint_and_help(
cx,
UNUSED_IO_AMOUNT,
expr.span,
"written amount is not handled",
None,
"use `Write::write_all` instead, or handle partial writes",
),
(_, true, "write", true) => span_lint_and_help(
cx,
UNUSED_IO_AMOUNT,
expr.span,
"written amount is not handled",
None,
"use `AsyncWriteExt::write_all` instead, or handle partial writes",
),
(_, true, "write_vectored", _) => {
span_lint(cx, UNUSED_IO_AMOUNT, expr.span, "written amount is not handled");
},
_ => (),
for span in wild_cards {
diag.span_note(
*span,
"the result is consumed here, but the amount of I/O bytes remains unhandled",
);
}
}
});
}
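As a rough illustration (the functions below are hypothetical, not from the diff), the reworked lint walks each block's semi statements and its trailing if/match expression, flagging places where the returned I/O amount is matched away:
use std::io::{self, Write};
fn send(sink: &mut impl Write, msg: &str) -> io::Result<()> {
    // flagged: `?` propagates the error but drops the `Ok` byte count
    sink.write(msg.as_bytes())?;
    // not flagged: the amount is bound by a `let`, so it counts as used
    let _written = sink.write(msg.as_bytes())?;
    // not flagged: `write_all` leaves no amount to ignore
    sink.write_all(msg.as_bytes())
}
fn send_ignoring_amount(sink: &mut impl Write, msg: &str) {
    // flagged: the match is the block's trailing expression and its `Ok(_)`
    // arm discards how many bytes were actually written
    match sink.write(msg.as_bytes()) {
        Ok(_) => {}
        Err(_) => {}
    }
}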

View file

@ -66,7 +66,7 @@ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
ident.span,
"non-standard lint formulation",
None,
&format!("try using `{}` instead", formulation.correction),
&format!("consider using `{}`", formulation.correction),
);
}
return;

View file

@ -71,7 +71,9 @@ pub fn check_path(cx: &LateContext<'_>, path: &[&str]) -> bool {
SimplifiedType::Str,
]
.iter()
.flat_map(|&ty| cx.tcx.incoherent_impls(ty).iter().copied());
.flat_map(|&ty| cx.tcx.incoherent_impls(ty).into_iter())
.flatten()
.copied();
for item_def_id in lang_items.iter().map(|(_, def_id)| def_id).chain(incoherent_impls) {
let lang_item_path = cx.get_def_path(item_def_id);
if path_syms.starts_with(&lang_item_path) {

View file

@ -1,5 +1,6 @@
use rustc_ast::{ast, attr};
use rustc_errors::Applicability;
use rustc_middle::ty::{AdtDef, TyCtxt};
use rustc_session::Session;
use rustc_span::sym;
use std::str::FromStr;
@ -159,3 +160,14 @@ pub fn is_doc_hidden(attrs: &[ast::Attribute]) -> bool {
.filter_map(ast::Attribute::meta_item_list)
.any(|l| attr::list_contains_name(&l, sym::hidden))
}
pub fn has_non_exhaustive_attr(tcx: TyCtxt<'_>, adt: AdtDef<'_>) -> bool {
adt.is_variant_list_non_exhaustive()
|| tcx.has_attr(adt.did(), sym::non_exhaustive)
|| adt.variants().iter().any(|variant_def| {
variant_def.is_field_list_non_exhaustive() || tcx.has_attr(variant_def.def_id, sym::non_exhaustive)
})
|| adt
.all_fields()
.any(|field_def| tcx.has_attr(field_def.did, sym::non_exhaustive))
}
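For context, a hypothetical enum (not part of the diff) for which the new helper returns true; the attribute may sit on the type itself or on an individual variant, and the helper also inspects fields.
#[non_exhaustive]
pub enum Level {
    Info,
    #[non_exhaustive]
    Custom { code: u32 },
}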

View file

@ -134,7 +134,7 @@ pub fn eq_stmt(&mut self, left: &Stmt<'_>, right: &Stmt<'_>) -> bool {
/// Checks whether two blocks are the same.
#[expect(clippy::similar_names)]
fn eq_block(&mut self, left: &Block<'_>, right: &Block<'_>) -> bool {
use TokenKind::{BlockComment, LineComment, Semi, Whitespace};
use TokenKind::{Semi, Whitespace};
if left.stmts.len() != right.stmts.len() {
return false;
}
@ -177,7 +177,7 @@ fn eq_block(&mut self, left: &Block<'_>, right: &Block<'_>) -> bool {
return false;
}
if !eq_span_tokens(self.inner.cx, lstart..lstmt_span.lo, rstart..rstmt_span.lo, |t| {
!matches!(t, Whitespace | LineComment { .. } | BlockComment { .. } | Semi)
!matches!(t, Whitespace | Semi)
}) {
return false;
}
@ -212,7 +212,7 @@ fn eq_block(&mut self, left: &Block<'_>, right: &Block<'_>) -> bool {
return false;
}
eq_span_tokens(self.inner.cx, lstart..lend, rstart..rend, |t| {
!matches!(t, Whitespace | LineComment { .. } | BlockComment { .. } | Semi)
!matches!(t, Whitespace | Semi)
})
}

View file

@ -536,7 +536,12 @@ fn find_primitive_impls<'tcx>(tcx: TyCtxt<'tcx>, name: &str) -> impl Iterator<It
"f32" => SimplifiedType::Float(FloatTy::F32),
"f64" => SimplifiedType::Float(FloatTy::F64),
#[allow(trivial_casts)]
_ => return Result::<_, rustc_errors::ErrorGuaranteed>::Ok(&[] as &[_]).into_iter().flatten().copied(),
_ => {
return Result::<_, rustc_errors::ErrorGuaranteed>::Ok(&[] as &[_])
.into_iter()
.flatten()
.copied();
},
};
tcx.incoherent_impls(ty).into_iter().flatten().copied()
@ -1712,7 +1717,6 @@ fn are_refutable<'a, I: IntoIterator<Item = &'a Pat<'a>>>(cx: &LateContext<'_>,
PatKind::Wild | PatKind::Never => false, // If `!` typechecked then the type is empty, so not refutable.
PatKind::Binding(_, _, _, pat) => pat.map_or(false, |pat| is_refutable(cx, pat)),
PatKind::Box(pat) | PatKind::Ref(pat, _) => is_refutable(cx, pat),
PatKind::Lit(..) | PatKind::Range(..) => true,
PatKind::Path(ref qpath) => is_enum_variant(cx, qpath, pat.hir_id),
PatKind::Or(pats) => {
// TODO: should be the honest check, that pats is exhaustive set
@ -1736,7 +1740,7 @@ fn are_refutable<'a, I: IntoIterator<Item = &'a Pat<'a>>>(cx: &LateContext<'_>,
},
}
},
PatKind::Err(_) => true,
PatKind::Lit(..) | PatKind::Range(..) | PatKind::Err(_) => true,
}
}

View file

@ -26,6 +26,7 @@
pub const EARLY_LINT_PASS: [&str; 3] = ["rustc_lint", "passes", "EarlyLintPass"];
pub const F32_EPSILON: [&str; 4] = ["core", "f32", "<impl f32>", "EPSILON"];
pub const F64_EPSILON: [&str; 4] = ["core", "f64", "<impl f64>", "EPSILON"];
pub const FILE_OPTIONS: [&str; 4] = ["std", "fs", "File", "options"];
#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
pub const FUTURES_IO_ASYNCREADEXT: [&str; 3] = ["futures_util", "io", "AsyncReadExt"];
#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
@ -50,6 +51,7 @@
pub const LATE_LINT_PASS: [&str; 3] = ["rustc_lint", "passes", "LateLintPass"];
pub const LINT: [&str; 2] = ["rustc_lint_defs", "Lint"];
pub const MSRV: [&str; 3] = ["clippy_config", "msrvs", "Msrv"];
pub const OPEN_OPTIONS_NEW: [&str; 4] = ["std", "fs", "OpenOptions", "new"];
pub const OS_STRING_AS_OS_STR: [&str; 5] = ["std", "ffi", "os_str", "OsString", "as_os_str"];
pub const OS_STR_TO_OS_STRING: [&str; 5] = ["std", "ffi", "os_str", "OsStr", "to_os_string"];
pub const PARKING_LOT_MUTEX_GUARD: [&str; 3] = ["lock_api", "mutex", "MutexGuard"];
@ -89,9 +91,15 @@
pub const SYM_MODULE: [&str; 3] = ["rustc_span", "symbol", "sym"];
pub const SYNTAX_CONTEXT: [&str; 3] = ["rustc_span", "hygiene", "SyntaxContext"];
#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
pub const TOKIO_FILE_OPTIONS: [&str; 5] = ["tokio", "fs", "file", "File", "options"];
#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
pub const TOKIO_IO_ASYNCREADEXT: [&str; 5] = ["tokio", "io", "util", "async_read_ext", "AsyncReadExt"];
#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
pub const TOKIO_IO_ASYNCWRITEEXT: [&str; 5] = ["tokio", "io", "util", "async_write_ext", "AsyncWriteExt"];
#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
pub const TOKIO_IO_OPEN_OPTIONS: [&str; 4] = ["tokio", "fs", "open_options", "OpenOptions"];
#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
pub const TOKIO_IO_OPEN_OPTIONS_NEW: [&str; 5] = ["tokio", "fs", "open_options", "OpenOptions", "new"];
pub const VEC_AS_MUT_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_mut_slice"];
pub const VEC_AS_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_slice"];
pub const VEC_DEQUE_ITER: [&str; 5] = ["alloc", "collections", "vec_deque", "VecDeque", "iter"];

View file

@ -219,7 +219,8 @@ pub fn implements_trait<'tcx>(
/// Same as `implements_trait` but allows using a `ParamEnv` different from the lint context.
///
/// The `callee_id` argument is used to determine whether this is a function call in a `const fn` environment, used for checking const traits.
/// The `callee_id` argument is used to determine whether this is a function call in a `const fn`
/// environment, used for checking const traits.
pub fn implements_trait_with_env<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ParamEnv<'tcx>,

View file

@ -1,3 +1,3 @@
[toolchain]
channel = "nightly-2024-01-11"
channel = "nightly-2024-01-25"
components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"]

View file

@ -0,0 +1,6 @@
error: multiple versions for dependency `winapi`: 0.2.8, 0.3.9
|
= note: `-D clippy::multiple-crate-versions` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::multiple_crate_versions)]`
error: could not compile `multiple-crate-versions` (bin "multiple-crate-versions") due to 1 previous error

View file

@ -0,0 +1,14 @@
# Should not lint for dev or build dependencies. See issue 5041.
[package]
# purposefully separated by - instead of _
name = "multiple-crate-versions"
version = "0.1.0"
publish = false
[workspace]
# One of the versions of winapi is only a dev dependency: allowed
[dependencies]
winapi = "0.2"
ansi_term = "=0.11.0"

View file

@ -0,0 +1,3 @@
#![warn(clippy::multiple_crate_versions)]
fn main() {}

View file

@ -0,0 +1,10 @@
[package]
name = "multiple_crate_versions"
version = "0.1.0"
publish = false
[workspace]
[dependencies]
winapi = "0.2"
ansi_term = "=0.11.0"

View file

@ -0,0 +1 @@
allowed-duplicate-crates = ["winapi"]

View file

@ -0,0 +1,3 @@
#![warn(clippy::multiple_crate_versions)]
fn main() {}

View file

@ -4,7 +4,7 @@ error: non-standard lint formulation
LL | /// Check for lint formulations that are correct
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: try using `Checks for` instead
= help: consider using `Checks for`
= note: `-D clippy::almost-standard-lint-formulation` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::almost_standard_lint_formulation)]`
@ -14,7 +14,7 @@ error: non-standard lint formulation
LL | /// Detects uses of incorrect formulations
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: try using `Checks for` instead
= help: consider using `Checks for`
error: aborting due to 2 previous errors

View file

@ -1,5 +1,5 @@
error: use of a disallowed method `rustc_lint::context::LintContext::span_lint`
--> $DIR/disallow_struct_span_lint.rs:14:5
--> $DIR/disallow_span_lint.rs:14:5
|
LL | cx.span_lint(lint, span, msg, |_| {});
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@ -8,10 +8,10 @@ LL | cx.span_lint(lint, span, msg, |_| {});
= help: to override `-D warnings` add `#[allow(clippy::disallowed_methods)]`
error: use of a disallowed method `rustc_middle::ty::context::TyCtxt::node_span_lint`
--> $DIR/disallow_struct_span_lint.rs:24:5
--> $DIR/disallow_span_lint.rs:24:5
|
LL | tcx.node_span_lint(lint, hir_id, span, msg, |_| {});
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 2 previous errors

View file

@ -1 +1 @@
pub-underscore-fields-behavior = "PublicallyExported"
pub-underscore-fields-behavior = "PubliclyExported"

View file

@ -3,6 +3,9 @@ foobar = 42
# so is this one
barfoo = 53
# when using underscores instead of dashes, suggest the correct one
allow_mixed_uninlined_format_args = true
# that one is ignored
[third-party]
clippy-feature = "nightly"

View file

@ -1,3 +1,4 @@
//@no-rustfix
//@error-in-other-file: unknown field `foobar`, expected one of
fn main() {}

View file

@ -11,6 +11,7 @@ error: error reading Clippy's configuration file: unknown field `foobar`, expect
allow-private-module-inception
allow-unwrap-in-tests
allowed-dotfiles
allowed-duplicate-crates
allowed-idents-below-min-chars
allowed-scripts
arithmetic-side-effects-allowed
@ -87,6 +88,7 @@ error: error reading Clippy's configuration file: unknown field `barfoo`, expect
allow-private-module-inception
allow-unwrap-in-tests
allowed-dotfiles
allowed-duplicate-crates
allowed-idents-below-min-chars
allowed-scripts
arithmetic-side-effects-allowed
@ -150,5 +152,82 @@ error: error reading Clippy's configuration file: unknown field `barfoo`, expect
LL | barfoo = 53
| ^^^^^^
error: aborting due to 2 previous errors
error: error reading Clippy's configuration file: unknown field `allow_mixed_uninlined_format_args`, expected one of
absolute-paths-allowed-crates
absolute-paths-max-segments
accept-comment-above-attributes
accept-comment-above-statement
allow-dbg-in-tests
allow-expect-in-tests
allow-mixed-uninlined-format-args
allow-one-hash-in-raw-strings
allow-print-in-tests
allow-private-module-inception
allow-unwrap-in-tests
allowed-dotfiles
allowed-duplicate-crates
allowed-idents-below-min-chars
allowed-scripts
arithmetic-side-effects-allowed
arithmetic-side-effects-allowed-binary
arithmetic-side-effects-allowed-unary
array-size-threshold
avoid-breaking-exported-api
await-holding-invalid-types
blacklisted-names
cargo-ignore-publish
check-private-items
cognitive-complexity-threshold
cyclomatic-complexity-threshold
disallowed-macros
disallowed-methods
disallowed-names
disallowed-types
doc-valid-idents
enable-raw-pointer-heuristic-for-send
enforce-iter-loop-reborrow
enforced-import-renames
enum-variant-name-threshold
enum-variant-size-threshold
excessive-nesting-threshold
future-size-threshold
ignore-interior-mutability
large-error-threshold
literal-representation-threshold
matches-for-let-else
max-fn-params-bools
max-include-file-size
max-struct-bools
max-suggested-slice-pattern-length
max-trait-bounds
min-ident-chars-threshold
missing-docs-in-crate-items
msrv
pass-by-value-size-limit
pub-underscore-fields-behavior
semicolon-inside-block-ignore-singleline
semicolon-outside-block-ignore-multiline
single-char-binding-names-threshold
stack-size-threshold
standard-macro-braces
struct-field-name-threshold
suppress-restriction-lint-in-const
third-party
too-large-for-stack
too-many-arguments-threshold
too-many-lines-threshold
trivial-copy-size-limit
type-complexity-threshold
unnecessary-box-size
unreadable-literal-lint-fractions
upper-case-acronyms-aggressive
vec-box-size-threshold
verbose-bit-mask-threshold
warn-on-all-wildcard-imports
--> $DIR/$DIR/clippy.toml:7:1
|
LL | allow_mixed_uninlined_format_args = true
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: perhaps you meant: `allow-mixed-uninlined-format-args`
error: aborting due to 3 previous errors

View file

@ -11,8 +11,8 @@
use syn::spanned::Spanned;
use syn::token::Star;
use syn::{
parse_macro_input, parse_quote, FnArg, ImplItem, ItemImpl, ItemTrait, Lifetime, Pat, PatIdent, PatType, Signature,
TraitItem, Type,
parse_macro_input, parse_quote, FnArg, ImplItem, ItemFn, ItemImpl, ItemTrait, Lifetime, Pat, PatIdent, PatType,
Signature, TraitItem, Type,
};
#[proc_macro_attribute]
@ -95,3 +95,33 @@ fn mut_receiver_of(sig: &mut Signature) -> Option<&mut FnArg> {
TokenStream::from(quote!(#item))
}
#[proc_macro_attribute]
pub fn fake_main(_attr: TokenStream, item: TokenStream) -> TokenStream {
let mut item = parse_macro_input!(item as ItemFn);
let span = item.block.brace_token.span;
if item.sig.asyncness.is_some() {
item.sig.asyncness = None;
}
let crate_name = quote! { fake_crate };
let block = item.block;
item.block = syn::parse_quote_spanned! {
span =>
{
#crate_name::block_on(async {
#block
})
}
};
quote! {
mod #crate_name {
pub fn block_on<F: ::std::future::Future>(_fut: F) {}
}
#item
}
.into()
}

View file

@ -85,4 +85,18 @@ fn block_in_match_expr(num: i32) -> i32 {
}
}
// issue #12162
macro_rules! timed {
($name:expr, $body:expr $(,)?) => {{
let __scope = ();
$body
}};
}
fn issue_12162() {
if timed!("check this!", false) {
println!();
}
}
fn main() {}

View file

@ -85,4 +85,18 @@ fn block_in_match_expr(num: i32) -> i32 {
}
}
// issue #12162
macro_rules! timed {
($name:expr, $body:expr $(,)?) => {{
let __scope = ();
$body
}};
}
fn issue_12162() {
if timed!("check this!", false) {
println!();
}
}
fn main() {}

View file

@ -9,17 +9,16 @@ fn simple_examples() {
// Simple
if true {
//~^ ERROR: all if blocks contain the same code at the start
println!("Hello World!");
println!("I'm branch nr: 1");
} else {
println!("Hello World!");
println!("I'm branch nr: 2");
}
//~^^^^^^^ ERROR: all if blocks contain the same code at the start
// Else if
if x == 0 {
//~^ ERROR: all if blocks contain the same code at the start
let y = 9;
println!("The value y was set to: `{}`", y);
let _z = y;
@ -38,6 +37,7 @@ fn simple_examples() {
println!("Ha, Pascal allows you to start the array where you want")
}
//~^^^^^^^^^^^^^^^^^^^ ERROR: all if blocks contain the same code at the start
// Return a value
let _ = if x == 7 {
@ -60,7 +60,6 @@ fn simple_but_suggestion_is_invalid() {
// Can't be automatically moved because used_value_name is getting used again
let used_value_name = 19;
if x == 10 {
//~^ ERROR: all if blocks contain the same code at the start
let used_value_name = "Different type";
println!("Str: {}", used_value_name);
let _ = 1;
@ -69,6 +68,7 @@ fn simple_but_suggestion_is_invalid() {
println!("Str: {}", used_value_name);
let _ = 2;
}
//~^^^^^^^^^ ERROR: all if blocks contain the same code at the start
let _ = used_value_name;
// This can be automatically moved as `can_be_overridden` is not used again
@ -101,11 +101,11 @@ fn check_if_same_than_else_mask() {
}
if x == 2019 {
//~^ ERROR: this `if` has identical blocks
println!("This should trigger `IS_SAME_THAN_ELSE` as usual");
} else {
println!("This should trigger `IS_SAME_THAN_ELSE` as usual");
}
//~^^^^^ ERROR: this `if` has identical blocks
}
#[allow(clippy::vec_init_then_push)]

View file

@ -2,7 +2,6 @@ error: all if blocks contain the same code at the start
--> $DIR/shared_at_top.rs:11:5
|
LL | / if true {
LL | |
LL | | println!("Hello World!");
| |_________________________________^
|
@ -21,7 +20,6 @@ error: all if blocks contain the same code at the start
--> $DIR/shared_at_top.rs:21:5
|
LL | / if x == 0 {
LL | |
LL | | let y = 9;
LL | | println!("The value y was set to: `{}`", y);
LL | | let _z = y;
@ -54,7 +52,6 @@ error: all if blocks contain the same code at the start
--> $DIR/shared_at_top.rs:62:5
|
LL | / if x == 10 {
LL | |
LL | | let used_value_name = "Different type";
LL | | println!("Str: {}", used_value_name);
| |_____________________________________________^
@ -105,13 +102,12 @@ error: this `if` has identical blocks
|
LL | if x == 2019 {
| __________________^
LL | |
LL | | println!("This should trigger `IS_SAME_THAN_ELSE` as usual");
LL | | } else {
| |_____^
|
note: same as this
--> $DIR/shared_at_top.rs:106:12
--> $DIR/shared_at_top.rs:105:12
|
LL | } else {
| ____________^

View file

@ -107,9 +107,9 @@ fn valid_examples() {
// Let's test empty blocks
if false {
//~^ ERROR: this `if` has identical blocks
} else {
}
//~^^^ ERROR: this `if` has identical blocks
}
/// This makes sure that the `if_same_then_else` masks the `shared_code_in_if_blocks` lint
@ -119,7 +119,6 @@ fn trigger_other_lint() {
// Same block
if x == 0 {
//~^ ERROR: this `if` has identical blocks
let u = 19;
println!("How are u today?");
let _ = "This is a string";
@ -128,6 +127,7 @@ fn trigger_other_lint() {
println!("How are u today?");
let _ = "This is a string";
}
//~^^^^^^^^^ ERROR: this `if` has identical blocks
// Only same expression
let _ = if x == 6 { 7 } else { 7 };
@ -138,28 +138,24 @@ fn trigger_other_lint() {
println!("Well I'm the most important block");
"I'm a pretty string"
} else if x == 68 {
//~^ ERROR: this `if` has identical blocks
println!("I'm a doppelgänger");
// Don't listen to my clone below
if y == 90 { "=^.^=" } else { ":D" }
} else {
// Don't listen to my clone above
println!("I'm a doppelgänger");
if y == 90 { "=^.^=" } else { ":D" }
};
//~^^^^^^^^^ ERROR: this `if` has identical blocks
if x == 0 {
println!("I'm single");
} else if x == 68 {
//~^ ERROR: this `if` has identical blocks
println!("I'm a doppelgänger");
// Don't listen to my clone below
} else {
// Don't listen to my clone above
println!("I'm a doppelgänger");
}
//~^^^^^ ERROR: this `if` has identical blocks
}
fn main() {}

View file

@ -3,12 +3,11 @@ error: this `if` has identical blocks
|
LL | if false {
| ______________^
LL | |
LL | | } else {
| |_____^
|
note: same as this
--> $DIR/valid_if_blocks.rs:111:12
--> $DIR/valid_if_blocks.rs:110:12
|
LL | } else {
| ____________^
@ -25,7 +24,6 @@ error: this `if` has identical blocks
|
LL | if x == 0 {
| _______________^
LL | |
LL | | let u = 19;
LL | | println!("How are u today?");
LL | | let _ = "This is a string";
@ -33,7 +31,7 @@ LL | | } else {
| |_____^
|
note: same as this
--> $DIR/valid_if_blocks.rs:126:12
--> $DIR/valid_if_blocks.rs:125:12
|
LL | } else {
| ____________^
@ -60,20 +58,17 @@ error: this `if` has identical blocks
|
LL | } else if x == 68 {
| _______________________^
LL | |
LL | | println!("I'm a doppelgänger");
LL | | // Don't listen to my clone below
LL | |
LL | | if y == 90 { "=^.^=" } else { ":D" }
LL | | } else {
| |_____^
|
note: same as this
--> $DIR/valid_if_blocks.rs:146:12
--> $DIR/valid_if_blocks.rs:144:12
|
LL | } else {
| ____________^
LL | | // Don't listen to my clone above
LL | | println!("I'm a doppelgänger");
LL | |
LL | | if y == 90 { "=^.^=" } else { ":D" }
@ -81,22 +76,19 @@ LL | | };
| |_____^
error: this `if` has identical blocks
--> $DIR/valid_if_blocks.rs:155:23
--> $DIR/valid_if_blocks.rs:153:23
|
LL | } else if x == 68 {
| _______________________^
LL | |
LL | | println!("I'm a doppelgänger");
LL | | // Don't listen to my clone below
LL | | } else {
| |_____^
|
note: same as this
--> $DIR/valid_if_blocks.rs:159:12
--> $DIR/valid_if_blocks.rs:155:12
|
LL | } else {
| ____________^
LL | | // Don't listen to my clone above
LL | | println!("I'm a doppelgänger");
LL | | }
| |_____^

Some files were not shown because too many files have changed in this diff.