Add semicolons for consistency

`clippy::semicolon_if_nothing_returned`
Author: Aramis Razzaghipour, 2021-10-03 23:39:43 +11:00
Parent: 60c5449120
Commit: 55c0b86cde
46 changed files with 151 additions and 151 deletions
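For context, `clippy::semicolon_if_nothing_returned` flags a unit-valued call used as the final expression of a block that returns nothing, and suggests terminating it with a semicolon — exactly the mechanical change applied across every hunk below. A minimal sketch of the pattern (hypothetical code, not taken from this repository; `log`, `before`, and `after` are illustrative names):

```rust
// Enable the lint for this crate; it lives in clippy's pedantic group,
// so it is allow-by-default unless opted into like this.
#![warn(clippy::semicolon_if_nothing_returned)]

// Stand-in for any call that returns `()`.
fn log(msg: &str) {
    eprintln!("{}", msg);
}

fn before() {
    // Accepted by rustc, but flagged by the lint: the final expression is
    // unit-valued, so nothing is actually being returned.
    log("no trailing semicolon")
}

fn after() {
    // The fix applied throughout this commit: add the trailing semicolon.
    log("trailing semicolon added");
}

fn main() {
    before();
    after();
}
```

The same warning can also be raised without editing source attributes, e.g. `cargo clippy -- -W clippy::semicolon_if_nothing_returned`; either way the suggested fix is the trailing semicolon seen throughout this diff.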

View file

@ -179,7 +179,7 @@ fn run(mut self, inbox: Receiver<Restart>) {
tracing::error!(
"Flycheck failed to run the following command: {:?}",
self.check_command()
)
);
}
self.progress(Progress::DidFinish(res));
}
@ -253,7 +253,7 @@ fn check_command(&self) -> Command {
}
fn send(&self, check_task: Message) {
(self.sender)(check_task)
(self.sender)(check_task);
}
}
@ -334,15 +334,15 @@ fn run(self) -> io::Result<bool> {
// Skip certain kinds of messages to only spend time on what's useful
JsonMessage::Cargo(message) => match message {
cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap()
self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
}
cargo_metadata::Message::CompilerMessage(msg) => {
self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap()
self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
}
_ => (),
},
JsonMessage::Rustc(message) => {
self.sender.send(CargoMessage::Diagnostic(message)).unwrap()
self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
}
}
}

View file

@ -67,11 +67,11 @@ pub(crate) fn expr(p: &mut Parser) {
}
pub(crate) fn stmt(p: &mut Parser) {
expressions::stmt(p, expressions::StmtWithSemi::No, true)
expressions::stmt(p, expressions::StmtWithSemi::No, true);
}
pub(crate) fn stmt_optional_semi(p: &mut Parser) {
expressions::stmt(p, expressions::StmtWithSemi::Optional, false)
expressions::stmt(p, expressions::StmtWithSemi::Optional, false);
}
pub(crate) fn visibility(p: &mut Parser) {
@ -84,7 +84,7 @@ pub(crate) fn meta_item(p: &mut Parser) {
}
pub(crate) fn item(p: &mut Parser) {
items::item_or_macro(p, true)
items::item_or_macro(p, true);
}
pub(crate) fn macro_items(p: &mut Parser) {
@ -109,7 +109,7 @@ pub(crate) fn macro_stmts(p: &mut Parser) {
}
pub(crate) fn attr(p: &mut Parser) {
attributes::outer_attrs(p)
attributes::outer_attrs(p);
}
}
@ -246,7 +246,7 @@ fn name_r(p: &mut Parser, recovery: TokenSet) {
}
fn name(p: &mut Parser) {
name_r(p, TokenSet::EMPTY)
name_r(p, TokenSet::EMPTY);
}
fn name_ref(p: &mut Parser) {

View file

@ -2,13 +2,13 @@
pub(super) fn inner_attrs(p: &mut Parser) {
while p.at(T![#]) && p.nth(1) == T![!] {
attr(p, true)
attr(p, true);
}
}
pub(super) fn outer_attrs(p: &mut Parser) {
while p.at(T![#]) {
attr(p, false)
attr(p, false);
}
}

View file

@ -139,7 +139,7 @@ pub(super) fn expr_block_contents(p: &mut Parser) {
continue;
}
stmt(p, StmtWithSemi::Yes, false)
stmt(p, StmtWithSemi::Yes, false);
}
}
@ -468,12 +468,12 @@ fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
let m = lhs.precede(p);
p.bump(T![.]);
if p.at(IDENT) || p.at(INT_NUMBER) {
name_ref_or_index(p)
name_ref_or_index(p);
} else if p.at(FLOAT_NUMBER) {
// FIXME: How to recover and instead parse INT + T![.]?
p.bump_any();
} else {
p.error("expected field name or number")
p.error("expected field name or number");
}
m.complete(p, FIELD_EXPR)
}

View file

@ -374,7 +374,7 @@ fn match_expr(p: &mut Parser) -> CompletedMarker {
if p.at(T!['{']) {
match_arm_list(p);
} else {
p.error("expected `{`")
p.error("expected `{`");
}
m.complete(p, MATCH_EXPR)
}
@ -602,7 +602,7 @@ fn try_block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
if p.at(T!['{']) {
stmt_list(p);
} else {
p.error("expected a block")
p.error("expected a block");
}
m.complete(p, BLOCK_EXPR)
}
@ -639,7 +639,7 @@ fn meta_var_expr(p: &mut Parser) -> CompletedMarker {
}
_ => {
while !p.at(R_DOLLAR) {
p.bump_any()
p.bump_any();
}
p.bump(R_DOLLAR);
m.complete(p, ERROR)

View file

@ -34,7 +34,7 @@ fn generic_param(p: &mut Parser) {
T![const] => const_param(p, m),
_ => {
m.abandon(p);
p.err_and_bump("expected type parameter")
p.err_and_bump("expected type parameter");
}
}
}
@ -62,7 +62,7 @@ fn type_param(p: &mut Parser, m: Marker) {
// test type_param_default
// struct S<T = i32>;
p.bump(T![=]);
types::type_(p)
types::type_(p);
}
m.complete(p, TYPE_PARAM);
}

View file

@ -20,7 +20,7 @@
pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
attributes::inner_attrs(p);
while !p.at(EOF) && !(p.at(T!['}']) && stop_on_r_curly) {
item_or_macro(p, stop_on_r_curly)
item_or_macro(p, stop_on_r_curly);
}
}
@ -165,7 +165,7 @@ pub(super) fn opt_item(p: &mut Parser, m: Marker) -> Result<(), Marker> {
p.bump_remap(T![default]);
p.bump(T![async]);
if is_unsafe {
p.bump(T![unsafe])
p.bump(T![unsafe]);
}
has_mods = true;
}
@ -404,7 +404,7 @@ fn fn_(p: &mut Parser, m: Marker) {
// trait T { fn foo(); }
p.bump(T![;]);
} else {
expressions::block_expr(p)
expressions::block_expr(p);
}
m.complete(p, FN);
}

View file

@ -58,7 +58,7 @@ pub(super) fn enum_(p: &mut Parser, m: Marker) {
if p.at(T!['{']) {
variant_list(p);
} else {
p.error("expected `{`")
p.error("expected `{`");
}
m.complete(p, ENUM);
}

View file

@ -4,12 +4,12 @@
// const C: u32 = 92;
pub(super) fn konst(p: &mut Parser, m: Marker) {
p.bump(T![const]);
const_or_static(p, m, true)
const_or_static(p, m, true);
}
pub(super) fn static_(p: &mut Parser, m: Marker) {
p.bump(T![static]);
const_or_static(p, m, false)
const_or_static(p, m, false);
}
fn const_or_static(p: &mut Parser, m: Marker, is_const: bool) {
@ -27,7 +27,7 @@ fn const_or_static(p: &mut Parser, m: Marker, is_const: bool) {
if p.at(T![:]) {
types::ascription(p);
} else {
p.error("missing type for `const` or `static`")
p.error("missing type for `const` or `static`");
}
if p.eat(T![=]) {
expressions::expr(p);

View file

@ -6,21 +6,21 @@
// fn c(x: i32, ) {}
// fn d(x: i32, y: ()) {}
pub(super) fn param_list_fn_def(p: &mut Parser) {
list_(p, Flavor::FnDef)
list_(p, Flavor::FnDef);
}
// test param_list_opt_patterns
// fn foo<F: FnMut(&mut Foo<'a>)>(){}
pub(super) fn param_list_fn_trait(p: &mut Parser) {
list_(p, Flavor::FnTrait)
list_(p, Flavor::FnTrait);
}
pub(super) fn param_list_fn_ptr(p: &mut Parser) {
list_(p, Flavor::FnPointer)
list_(p, Flavor::FnPointer);
}
pub(super) fn param_list_closure(p: &mut Parser) {
list_(p, Flavor::Closure)
list_(p, Flavor::Closure);
}
#[derive(Debug, Clone, Copy)]
@ -104,13 +104,13 @@ fn param(p: &mut Parser, m: Marker, flavor: Flavor) -> Variadic {
Flavor::FnDef => {
patterns::pattern(p);
if variadic_param(p) {
res = Variadic(true)
res = Variadic(true);
} else if p.at(T![:]) {
types::ascription(p)
types::ascription(p);
} else {
// test_err missing_fn_param_type
// fn f(x y: i32, z, t: i32) {}
p.error("missing type for function parameter")
p.error("missing type for function parameter");
}
}
// test value_parameters_no_patterns
@ -128,11 +128,11 @@ fn param(p: &mut Parser, m: Marker, flavor: Flavor) -> Variadic {
if (p.at(IDENT) || p.at(UNDERSCORE)) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) {
patterns::pattern_single(p);
if variadic_param(p) {
res = Variadic(true)
res = Variadic(true);
} else if p.at(T![:]) {
types::ascription(p)
types::ascription(p);
} else {
p.error("missing type for function parameter")
p.error("missing type for function parameter");
}
} else {
types::type_(p);

View file

@ -16,15 +16,15 @@ pub(super) fn is_use_path_start(p: &Parser) -> bool {
}
pub(super) fn use_path(p: &mut Parser) {
path(p, Mode::Use)
path(p, Mode::Use);
}
pub(crate) fn type_path(p: &mut Parser) {
path(p, Mode::Type)
path(p, Mode::Type);
}
pub(super) fn expr_path(p: &mut Parser) {
path(p, Mode::Expr)
path(p, Mode::Expr);
}
pub(crate) fn type_path_for_qualifier(p: &mut Parser, qual: CompletedMarker) -> CompletedMarker {
@ -117,7 +117,7 @@ fn opt_path_type_args(p: &mut Parser, mode: Mode) {
params::param_list_fn_trait(p);
opt_ret_type(p);
} else {
generic_args::opt_generic_arg_list(p, false)
generic_args::opt_generic_arg_list(p, false);
}
}
Mode::Expr => generic_args::opt_generic_arg_list(p, true),

View file

@ -19,7 +19,7 @@ pub(crate) fn pattern(p: &mut Parser) {
/// Parses a pattern list separated by pipes `|`.
pub(super) fn pattern_top(p: &mut Parser) {
pattern_top_r(p, PAT_RECOVERY_SET)
pattern_top_r(p, PAT_RECOVERY_SET);
}
pub(crate) fn pattern_single(p: &mut Parser) {

View file

@ -57,7 +57,7 @@ fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) {
pub(super) fn ascription(p: &mut Parser) {
assert!(p.at(T![:]));
p.bump(T![:]);
type_(p)
type_(p);
}
fn paren_or_tuple_type(p: &mut Parser) {
@ -204,7 +204,7 @@ fn fn_ptr_type(p: &mut Parser) {
if p.at(T!['(']) {
params::param_list_fn_ptr(p);
} else {
p.error("expected parameters")
p.error("expected parameters");
}
// test fn_pointer_type_with_ret
// type F = fn() -> ();
@ -274,7 +274,7 @@ fn dyn_trait_type(p: &mut Parser) {
// type C = self::Foo;
// type D = super::Foo;
pub(super) fn path_type(p: &mut Parser) {
path_type_(p, true)
path_type_(p, true);
}
// test macro_call_type

View file

@ -177,7 +177,7 @@ pub(crate) fn bump_any(&mut self) {
if kind == EOF {
return;
}
self.do_bump(kind, 1)
self.do_bump(kind, 1);
}
/// Advances the parser by one token, remapping its kind.
@ -200,7 +200,7 @@ pub(crate) fn bump_remap(&mut self, kind: SyntaxKind) {
/// does.
pub(crate) fn error<T: Into<String>>(&mut self, message: T) {
let msg = ParseError(Box::new(message.into()));
self.push_event(Event::Error { msg })
self.push_event(Event::Error { msg });
}
/// Consume the next token if it is `kind` or emit an error
@ -258,7 +258,7 @@ fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
}
fn push_event(&mut self, event: Event) {
self.events.push(event)
self.events.push(event);
}
}

View file

@ -14,7 +14,7 @@ pub(crate) const fn new(kinds: &[SyntaxKind]) -> TokenSet {
let mut i = 0;
while i < kinds.len() {
res |= mask(kinds[i]);
i += 1
i += 1;
}
TokenSet(res)
}

View file

@ -320,7 +320,7 @@ pub(crate) fn read(self) -> tt::Subtree {
})
.collect(),
};
res[i] = Some(s)
res[i] = Some(s);
}
res[0].take().unwrap()

View file

@ -93,7 +93,7 @@ struct ProfilerImpl {
impl ProfileSpan {
pub fn detail(mut self, detail: impl FnOnce() -> String) -> ProfileSpan {
if let Some(profiler) = &mut self.0 {
profiler.detail = Some(detail())
profiler.detail = Some(detail());
}
self
}
@ -114,7 +114,7 @@ impl HeartbeatSpan {
#[inline]
pub fn new(enabled: bool) -> Self {
if enabled {
with_profile_stack(|it| it.heartbeats(true))
with_profile_stack(|it| it.heartbeats(true));
}
Self { enabled }
}
@ -123,7 +123,7 @@ pub fn new(enabled: bool) -> Self {
impl Drop for HeartbeatSpan {
fn drop(&mut self) {
if self.enabled {
with_profile_stack(|it| it.heartbeats(false))
with_profile_stack(|it| it.heartbeats(false));
}
}
}
@ -238,7 +238,7 @@ fn pop(&mut self, label: Label, detail: Option<String>) {
self.heartbeat(frame.heartbeats);
let avg_span = duration / (frame.heartbeats + 1);
if avg_span > self.filter.heartbeat_longer_than {
eprintln!("Too few heartbeats {} ({}/{:?})?", label, frame.heartbeats, duration)
eprintln!("Too few heartbeats {} ({}/{:?})?", label, frame.heartbeats, duration);
}
}
@ -292,7 +292,7 @@ fn print(
accounted_for += tree[child].duration;
if tree[child].duration.as_millis() > longer_than.as_millis() {
print(tree, child, level + 1, longer_than, out)
print(tree, child, level + 1, longer_than, out);
} else {
let (total_duration, cnt) =
short_children.entry(tree[child].label).or_insert((Duration::default(), 0));

View file

@ -92,7 +92,7 @@ pub fn cpu_span() -> CpuSpan {
{
eprintln!(
r#"cpu profiling is disabled, uncomment `default = [ "cpu_profiler" ]` in Cargo.toml to enable."#
)
);
}
CpuSpan { _private: () }

View file

@ -70,15 +70,15 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut prefix = "";
if instructions > 10000 {
instructions /= 1000;
prefix = "k"
prefix = "k";
}
if instructions > 10000 {
instructions /= 1000;
prefix = "m"
prefix = "m";
}
if instructions > 10000 {
instructions /= 1000;
prefix = "g"
prefix = "g";
}
write!(f, ", {}{}instr", instructions, prefix)?;
}

View file

@ -4,7 +4,7 @@
fn main() {
set_rerun();
println!("cargo:rustc-env=REV={}", rev())
println!("cargo:rustc-env=REV={}", rev());
}
fn set_rerun() {

View file

@ -33,9 +33,9 @@ pub fn list_files(dir: &Path) -> Vec<PathBuf> {
path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.');
if !is_hidden {
if file_type.is_dir() {
work.push(path)
work.push(path);
} else if file_type.is_file() {
res.push(path)
res.push(path);
}
}
}
@ -66,7 +66,7 @@ pub fn extract(tag: &str, text: &str) -> Vec<CommentBlock> {
panic!(
"Use plain (non-doc) comments with tags like {}:\n {}",
tag, first
)
);
}
block.id = id.trim().to_string();
@ -106,7 +106,7 @@ pub fn extract_untagged(text: &str) -> Vec<CommentBlock> {
}
}
if !block.contents.is_empty() {
res.push(block)
res.push(block);
}
res
}
@ -139,7 +139,7 @@ fn ensure_rustfmt() {
panic!(
"Failed to run rustfmt from toolchain 'stable'. \
Please run `rustup component add rustfmt --toolchain stable` to install it.",
)
);
}
}
@ -185,7 +185,7 @@ pub fn ensure_file_contents(file: &Path, contents: &str) {
let _ = fs::create_dir_all(parent);
}
fs::write(file, contents).unwrap();
panic!("some file was not up to date and has been updated, simply re-run the tests")
panic!("some file was not up to date and has been updated, simply re-run the tests");
}
fn normalize_newlines(s: &str) -> String {

View file

@ -45,7 +45,7 @@ fn to_snake_case<F: Fn(&char) -> char>(s: &str, change_case: F) -> String {
if c.is_ascii_uppercase() && prev {
// This check is required to not translate `Weird_Case` into `weird__case`.
if !buf.ends_with('_') {
buf.push('_')
buf.push('_');
}
}
prev = true;
@ -60,7 +60,7 @@ pub fn replace(buf: &mut String, from: char, to: &str) {
return;
}
// FIXME: do this in place.
*buf = buf.replace(from, to)
*buf = buf.replace(from, to);
}
pub fn trim_indent(mut text: &str) -> String {
@ -101,7 +101,7 @@ pub fn defer<F: FnOnce()>(f: F) -> impl Drop {
impl<F: FnOnce()> Drop for D<F> {
fn drop(&mut self) {
if let Some(f) = self.0.take() {
f()
f();
}
}
}

View file

@ -25,19 +25,19 @@ fn init() {
if !ctx.is_empty() {
eprintln!("Panic context:");
for frame in ctx.iter() {
eprintln!("> {}\n", frame)
eprintln!("> {}\n", frame);
}
}
default_hook(panic_info)
})
default_hook(panic_info);
});
};
panic::set_hook(Box::new(hook))
panic::set_hook(Box::new(hook));
}
}
impl Drop for PanicContext {
fn drop(&mut self) {
with_ctx(|ctx| assert!(ctx.pop().is_some()))
with_ctx(|ctx| assert!(ctx.pop().is_some()));
}
}
@ -45,5 +45,5 @@ fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
thread_local! {
static CTX: RefCell<Vec<String>> = RefCell::new(Vec::new());
}
CTX.with(|ctx| f(&mut *ctx.borrow_mut()))
CTX.with(|ctx| f(&mut *ctx.borrow_mut()));
}

View file

@ -42,9 +42,9 @@ pub fn streaming_output(
};
for line in String::from_utf8_lossy(new_lines).lines() {
if is_out {
on_stdout_line(line)
on_stdout_line(line);
} else {
on_stderr_line(line)
on_stderr_line(line);
}
}
}

View file

@ -120,7 +120,7 @@ pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {
to.iter().for_each(|to| builder.insert(offset, to.to_string()));
}
for (from, to) in self.replacements.iter() {
builder.replace(from.text_range(), to.to_string())
builder.replace(from.text_range(), to.to_string());
}
for text_range in self.deletions.iter().map(SyntaxElement::text_range) {
builder.delete(text_range);
@ -233,7 +233,7 @@ fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) {
diff.insertions.entry(insert_pos).or_insert_with(Vec::new).extend(drain);
rhs_children = rhs_children_clone;
} else {
go(diff, lhs_ele, rhs_ele)
go(diff, lhs_ele, rhs_ele);
}
}
}

View file

@ -126,7 +126,7 @@ pub(super) fn increase_indent(self, node: &SyntaxNode) {
if let Some(ws) = ast::Whitespace::cast(token) {
if ws.text().contains('\n') {
let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self));
ted::replace(ws.syntax(), &new_ws)
ted::replace(ws.syntax(), &new_ws);
}
}
}
@ -143,7 +143,7 @@ pub(super) fn decrease_indent(self, node: &SyntaxNode) {
let new_ws = make::tokens::whitespace(
&ws.syntax().text().replace(&format!("\n{}", self), "\n"),
);
ted::replace(ws.syntax(), &new_ws)
ted::replace(ws.syntax(), &new_ws);
}
}
}

View file

@ -49,7 +49,7 @@ fn get_or_create_where_clause(&self) -> ast::WhereClause {
} else {
Position::last_child_of(self.syntax())
};
create_where_clause(position)
create_where_clause(position);
}
self.where_clause().unwrap()
}
@ -77,7 +77,7 @@ fn get_or_create_where_clause(&self) -> ast::WhereClause {
} else {
Position::last_child_of(self.syntax())
};
create_where_clause(position)
create_where_clause(position);
}
self.where_clause().unwrap()
}
@ -107,7 +107,7 @@ fn get_or_create_where_clause(&self) -> ast::WhereClause {
} else {
Position::last_child_of(self.syntax())
};
create_where_clause(position)
create_where_clause(position);
}
self.where_clause().unwrap()
}
@ -145,7 +145,7 @@ fn get_or_create_where_clause(&self) -> ast::WhereClause {
} else {
Position::last_child_of(self.syntax())
};
create_where_clause(position)
create_where_clause(position);
}
self.where_clause().unwrap()
}
@ -177,7 +177,7 @@ fn get_or_create_where_clause(&self) -> ast::WhereClause {
} else {
Position::last_child_of(self.syntax())
};
create_where_clause(position)
create_where_clause(position);
}
self.where_clause().unwrap()
}
@ -234,7 +234,7 @@ pub fn add_generic_param(&self, generic_param: ast::GenericParam) {
}
None => {
let after_l_angle = Position::after(self.l_angle_token().unwrap());
ted::insert(after_l_angle, generic_param.syntax())
ted::insert(after_l_angle, generic_param.syntax());
}
}
}
@ -247,7 +247,7 @@ pub fn add_predicate(&self, predicate: ast::WherePred) {
ted::append_child_raw(self.syntax(), make::token(T![,]));
}
}
ted::append_child(self.syntax(), predicate.syntax())
ted::append_child(self.syntax(), predicate.syntax());
}
}
@ -267,7 +267,7 @@ impl ast::PathSegment {
pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList {
if self.generic_arg_list().is_none() {
let arg_list = make::generic_arg_list().clone_for_update();
ted::append_child(self.syntax(), arg_list.syntax())
ted::append_child(self.syntax(), arg_list.syntax());
}
self.generic_arg_list().unwrap()
}
@ -286,7 +286,7 @@ pub fn remove(&self) {
break;
}
}
ted::remove(self.syntax())
ted::remove(self.syntax());
}
}
@ -301,13 +301,13 @@ pub fn remove(&self) {
let ws_text = next_ws.syntax().text();
if let Some(rest) = ws_text.strip_prefix('\n') {
if rest.is_empty() {
ted::remove(next_ws.syntax())
ted::remove(next_ws.syntax());
} else {
ted::replace(next_ws.syntax(), make::tokens::whitespace(rest))
ted::replace(next_ws.syntax(), make::tokens::whitespace(rest));
}
}
}
ted::remove(self.syntax())
ted::remove(self.syntax());
}
}
@ -525,7 +525,7 @@ fn dedent(&self, by: IndentLevel) {
fn reindent_to(&self, target_level: IndentLevel) {
let current_level = IndentLevel::from_node(self.syntax());
self.dedent(current_level);
self.indent(target_level)
self.indent(target_level);
}
}

View file

@ -257,7 +257,7 @@ pub fn block_expr(
format_to!(buf, " {}\n", stmt);
}
if let Some(tail_expr) = tail_expr {
format_to!(buf, " {}\n", tail_expr)
format_to!(buf, " {}\n", tail_expr);
}
buf += "}";
ast_from_text(&format!("fn f() {}", buf))

View file

@ -609,7 +609,7 @@ fn char_ranges(
TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap())
+ offset,
unescaped_char,
))
));
});
Some(res)
@ -631,7 +631,7 @@ pub fn value(&self) -> Option<u128> {
let mut text = token.text();
if let Some(suffix) = self.suffix() {
text = &text[..text.len() - suffix.len()]
text = &text[..text.len() - suffix.len()];
}
let radix = self.radix();

View file

@ -28,7 +28,7 @@ pub fn function_declaration(node: &ast::Fn) -> String {
format_to!(buf, "{} ", abi);
}
if let Some(name) = node.name() {
format_to!(buf, "fn {}", name)
format_to!(buf, "fn {}", name);
}
if let Some(type_params) = node.generic_param_list() {
format_to!(buf, "{}", type_params);

View file

@ -88,7 +88,7 @@ fn finish_node(&mut self) {
}
fn error(&mut self, error: ParseError) {
self.inner.error(error, self.text_pos)
self.inner.error(error, self.text_pos);
}
}
@ -108,7 +108,7 @@ pub(super) fn finish(mut self) -> (GreenNode, Vec<SyntaxError>) {
match mem::replace(&mut self.state, State::Normal) {
State::PendingFinish => {
self.eat_trivias();
self.inner.finish_node()
self.inner.finish_node();
}
State::PendingStart | State::Normal => unreachable!(),
}

View file

@ -81,7 +81,7 @@ fn eq(&self, other: &AstPtr<N>) -> bool {
impl<N: AstNode> Hash for AstPtr<N> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.raw.hash(state)
self.raw.hash(state);
}
}

View file

@ -56,19 +56,19 @@ pub fn finish(self) -> Parse<SyntaxNode> {
pub fn token(&mut self, kind: SyntaxKind, text: &str) {
let kind = RustLanguage::kind_to_raw(kind);
self.inner.token(kind, text)
self.inner.token(kind, text);
}
pub fn start_node(&mut self, kind: SyntaxKind) {
let kind = RustLanguage::kind_to_raw(kind);
self.inner.start_node(kind)
self.inner.start_node(kind);
}
pub fn finish_node(&mut self) {
self.inner.finish_node()
self.inner.finish_node();
}
pub fn error(&mut self, error: parser::ParseError, text_pos: TextSize) {
self.errors.push(SyntaxError::new_at_offset(*error.0, text_pos))
self.errors.push(SyntaxError::new_at_offset(*error.0, text_pos));
}
}

View file

@ -77,23 +77,23 @@ pub fn last_child_of(node: &(impl Into<SyntaxNode> + Clone)) -> Position {
}
pub fn insert(position: Position, elem: impl Element) {
insert_all(position, vec![elem.syntax_element()])
insert_all(position, vec![elem.syntax_element()]);
}
pub fn insert_raw(position: Position, elem: impl Element) {
insert_all_raw(position, vec![elem.syntax_element()])
insert_all_raw(position, vec![elem.syntax_element()]);
}
pub fn insert_all(position: Position, mut elements: Vec<SyntaxElement>) {
if let Some(first) = elements.first() {
if let Some(ws) = ws_before(&position, first) {
elements.insert(0, ws.into())
elements.insert(0, ws.into());
}
}
if let Some(last) = elements.last() {
if let Some(ws) = ws_after(&position, last) {
elements.push(ws.into())
elements.push(ws.into());
}
}
insert_all_raw(position, elements)
insert_all_raw(position, elements);
}
pub fn insert_all_raw(position: Position, elements: Vec<SyntaxElement>) {
let (parent, index) = match position.repr {
@ -104,10 +104,10 @@ pub fn insert_all_raw(position: Position, elements: Vec<SyntaxElement>) {
}
pub fn remove(elem: impl Element) {
elem.syntax_element().detach()
elem.syntax_element().detach();
}
pub fn remove_all(range: RangeInclusive<SyntaxElement>) {
replace_all(range, Vec::new())
replace_all(range, Vec::new());
}
pub fn remove_all_iter(range: impl IntoIterator<Item = SyntaxElement>) {
let mut it = range.into_iter();
@ -115,9 +115,9 @@ pub fn remove_all_iter(range: impl IntoIterator<Item = SyntaxElement>) {
match it.last() {
Some(mut last) => {
if first.index() > last.index() {
mem::swap(&mut first, &mut last)
mem::swap(&mut first, &mut last);
}
remove_all(first..=last)
remove_all(first..=last);
}
None => remove(first),
}
@ -125,26 +125,26 @@ pub fn remove_all_iter(range: impl IntoIterator<Item = SyntaxElement>) {
}
pub fn replace(old: impl Element, new: impl Element) {
replace_with_many(old, vec![new.syntax_element()])
replace_with_many(old, vec![new.syntax_element()]);
}
pub fn replace_with_many(old: impl Element, new: Vec<SyntaxElement>) {
let old = old.syntax_element();
replace_all(old.clone()..=old, new)
replace_all(old.clone()..=old, new);
}
pub fn replace_all(range: RangeInclusive<SyntaxElement>, new: Vec<SyntaxElement>) {
let start = range.start().index();
let end = range.end().index();
let parent = range.start().parent().unwrap();
parent.splice_children(start..end + 1, new)
parent.splice_children(start..end + 1, new);
}
pub fn append_child(node: &(impl Into<SyntaxNode> + Clone), child: impl Element) {
let position = Position::last_child_of(node);
insert(position, child)
insert(position, child);
}
pub fn append_child_raw(node: &(impl Into<SyntaxNode> + Clone), child: impl Element) {
let position = Position::last_child_of(node);
insert_raw(position, child)
insert_raw(position, child);
}
fn ws_before(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {

View file

@ -137,7 +137,7 @@ fn unquote(text: &str, prefix_len: usize, end_delimiter: char) -> Option<&str> {
if let Err(err) = char {
push_err(1, (range.start, err));
}
})
});
}
}
}
@ -148,7 +148,7 @@ fn unquote(text: &str, prefix_len: usize, end_delimiter: char) -> Option<&str> {
if let Err(err) = char {
push_err(2, (range.start, err));
}
})
});
}
}
}

View file

@ -19,6 +19,6 @@ pub(crate) fn validate_block_expr(block: ast::BlockExpr, errors: &mut Vec<Syntax
"A block in this position cannot accept inner attributes",
attr.syntax().text_range(),
)
}))
}));
}
}

View file

@ -43,7 +43,7 @@ pub fn next_round(&mut self) -> bool {
}
pub fn sample(&mut self, x: f64, y: f64) {
self.rounds.last_mut().unwrap().samples.push((x, y))
self.rounds.last_mut().unwrap().samples.push((x, y));
}
}
@ -54,7 +54,7 @@ fn drop(&mut self) {
for round in &self.rounds {
eprintln!("\n{}", round.plot);
}
panic!("Doesn't look linear!")
panic!("Doesn't look linear!");
}
}
}

View file

@ -142,14 +142,14 @@ pub fn parse(ra_fixture: &str) -> (Option<MiniCore>, Vec<String>, Vec<Fixture>)
if line.starts_with("//-") {
let meta = Fixture::parse_meta_line(line);
res.push(meta)
res.push(meta);
} else {
if line.starts_with("// ")
&& line.contains(':')
&& !line.contains("::")
&& line.chars().all(|it| !it.is_uppercase())
{
panic!("looks like invalid metadata line: {:?}", line)
panic!("looks like invalid metadata line: {:?}", line);
}
if let Some(entry) = res.last_mut() {
@ -256,9 +256,9 @@ fn parse(line: &str) -> MiniCore {
let line = line.strip_prefix("//- minicore:").unwrap().trim();
for entry in line.split(", ") {
if res.has_flag(entry) {
panic!("duplicate minicore flag: {:?}", entry)
panic!("duplicate minicore flag: {:?}", entry);
}
res.activated_flags.push(entry.to_string())
res.activated_flags.push(entry.to_string());
}
res
@ -354,7 +354,7 @@ pub fn source_code(mut self) -> String {
}
if keep {
buf.push_str(line)
buf.push_str(line);
}
if line_region {
active_regions.pop().unwrap();

View file

@ -244,7 +244,7 @@ pub fn extract_annotations(text: &str) -> Vec<(TextRange, String)> {
range + line_start.1
};
res.push((range, content))
res.push((range, content));
}
LineAnnotation::Continuation { mut offset, content } => {
offset += annotation_offset;
@ -301,7 +301,7 @@ fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
let mut file = false;
if !continuation && content.starts_with("file") {
file = true;
content = &content["file".len()..]
content = &content["file".len()..];
}
let content = content.trim().to_string();
@ -371,7 +371,7 @@ fn main() {
pub fn skip_slow_tests() -> bool {
let should_skip = std::env::var("CI").is_err() && std::env::var("RUN_SLOW_TESTS").is_err();
if should_skip {
eprintln!("ignoring slow test")
eprintln!("ignoring slow test");
} else {
let path = project_root().join("./target/.slow_tests_cookie");
fs::write(&path, ".").unwrap();
@ -432,7 +432,7 @@ struct Bencher {
impl Drop for Bencher {
fn drop(&mut self) {
eprintln!("{}: {}", self.label, self.sw.elapsed())
eprintln!("{}: {}", self.label, self.sw.elapsed());
}
}

View file

@ -110,7 +110,7 @@ pub fn apply(&self, text: &mut String) {
// FIXME: figure out a way to mutate the text in-place or reuse the
// memory in some other way
*text = buf
*text = buf;
}
pub fn union(&mut self, other: TextEdit) -> Result<(), TextEdit> {
@ -163,13 +163,13 @@ pub fn is_empty(&self) -> bool {
self.indels.is_empty()
}
pub fn replace(&mut self, range: TextRange, replace_with: String) {
self.indel(Indel::replace(range, replace_with))
self.indel(Indel::replace(range, replace_with));
}
pub fn delete(&mut self, range: TextRange) {
self.indel(Indel::delete(range))
self.indel(Indel::delete(range));
}
pub fn insert(&mut self, offset: TextSize, text: String) {
self.indel(Indel::insert(offset, text))
self.indel(Indel::insert(offset, text));
}
pub fn finish(self) -> TextEdit {
let mut indels = self.indels;

View file

@ -169,7 +169,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match tt {
TokenTree::Leaf(Leaf::Punct(p)) => {
needs_space = p.spacing == Spacing::Alone;
fmt::Display::fmt(p, f)?
fmt::Display::fmt(p, f)?;
}
tt => fmt::Display::fmt(tt, f)?,
}

View file

@ -38,7 +38,7 @@ fn spawn(sender: loader::Sender) -> NotifyHandle {
NotifyHandle { sender, _thread: thread }
}
fn set_config(&mut self, config: loader::Config) {
self.sender.send(Message::Config(config)).unwrap()
self.sender.send(Message::Config(config)).unwrap();
}
fn invalidate(&mut self, path: AbsPathBuf) {
self.sender.send(Message::Invalidate(path)).unwrap();
@ -84,7 +84,7 @@ fn run(mut self, inbox: Receiver<Message>) {
if !config.watch.is_empty() {
let (watcher_sender, watcher_receiver) = unbounded();
let watcher = log_notify_error(RecommendedWatcher::new(move |event| {
watcher_sender.send(event).unwrap()
watcher_sender.send(event).unwrap();
}));
self.watcher = watcher.map(|it| (it, watcher_receiver));
}
@ -99,7 +99,7 @@ fn run(mut self, inbox: Receiver<Message>) {
for (i, entry) in config.load.into_iter().enumerate() {
let watch = config.watch.contains(&i);
if watch {
self.watched_entries.push(entry.clone())
self.watched_entries.push(entry.clone());
}
let files = self.load_entry(entry, watch);
self.send(loader::Message::Loaded { files });
@ -149,7 +149,7 @@ fn run(mut self, inbox: Receiver<Message>) {
Some((path, contents))
})
.collect();
self.send(loader::Message::Loaded { files })
self.send(loader::Message::Loaded { files });
}
}
}
@ -165,7 +165,7 @@ fn load_entry(
.into_iter()
.map(|file| {
if watch {
self.watch(file.clone())
self.watch(file.clone());
}
let contents = read(file.as_path());
(file, contents)
@ -218,7 +218,7 @@ fn watch(&mut self, path: AbsPathBuf) {
}
}
fn send(&mut self, msg: loader::Message) {
(self.sender)(msg)
(self.sender)(msg);
}
}

View file

@ -112,7 +112,7 @@ pub fn partition(&self, vfs: &Vfs) -> Vec<FileSet> {
let mut res = vec![FileSet::default(); self.len()];
for (file_id, path) in vfs.iter() {
let root = self.classify(path, &mut scratch_space);
res[root].insert(file_id, path.clone())
res[root].insert(file_id, path.clone());
}
res
}
@ -157,7 +157,7 @@ pub fn len(&self) -> usize {
/// Add a new set of paths prefixes.
pub fn add_file_set(&mut self, roots: Vec<VfsPath>) {
self.roots.push(roots)
self.roots.push(roots);
}
/// Build the `FileSetConfig`.

View file

@ -357,7 +357,7 @@ fn join(&self, mut path: &str) -> Option<VirtualPath> {
if !res.pop() {
return None;
}
path = &path["../".len()..]
path = &path["../".len()..];
}
path = path.trim_start_matches("./");
res.0 = format!("{}/{}", res.0, path);

View file

@ -63,7 +63,7 @@ impl<T> Eq for Idx<T> {}
impl<T> Hash for Idx<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.raw.hash(state)
self.raw.hash(state);
}
}
@ -71,7 +71,7 @@ impl<T> fmt::Debug for Idx<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut type_name = std::any::type_name::<T>();
if let Some(idx) = type_name.rfind(':') {
type_name = &type_name[idx + 1..]
type_name = &type_name[idx + 1..];
}
write!(f, "Idx::<{}>({})", type_name, self.raw)
}

View file

@ -13,7 +13,7 @@
impl flags::Install {
pub(crate) fn run(self) -> Result<()> {
if cfg!(target_os = "macos") {
fix_path_for_mac().context("Fix path for mac")?
fix_path_for_mac().context("Fix path for mac")?;
}
if let Some(server) = self.server() {
install_server(server).context("install server")?;
@ -148,7 +148,7 @@ fn install_server(opts: ServerOpt) -> Result<()> {
eprintln!(
"\nWARNING: at least rust 1.{}.0 is required to compile rust-analyzer\n",
REQUIRED_RUST_VERSION,
)
);
}
let features = match opts.malloc {
Malloc::System => &[][..],