diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index 455ffd423..f63cd57cd 100755 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -77,15 +77,12 @@ impl fmt::Display for VersionInfo { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "cargo {}.{}.{}", self.major, self.minor, self.patch)?; - match self.cfg_info.as_ref().map(|ci| &ci.release_channel) { - Some(channel) => { - if channel != "stable" { - write!(f, "-{}", channel)?; - let empty = String::from(""); - write!(f, "{}", self.pre_release.as_ref().unwrap_or(&empty))?; - } - }, - None => (), + if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) { + if channel != "stable" { + write!(f, "-{}", channel)?; + let empty = String::from(""); + write!(f, "{}", self.pre_release.as_ref().unwrap_or(&empty))?; + } }; if let Some(ref cfg) = self.cfg_info { diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs index a96a26bb0..9ada0fb74 100644 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -583,16 +583,13 @@ pub fn parse_dep_info(dep_info: &Path) -> CargoResult>> { let mut paths = Vec::new(); let mut deps = deps.split(' ').map(|s| s.trim()).filter(|s| !s.is_empty()); - loop { - let mut file = match deps.next() { - Some(s) => s.to_string(), - None => break, - }; - while file.ends_with("\\") { + while let Some(s) = deps.next() { + let mut file = s.to_string(); + while file.ends_with('\\') { file.pop(); file.push(' '); file.push_str(deps.next().chain_error(|| { - internal(format!("malformed dep-info format, trailing \\")) + internal("malformed dep-info format, trailing \\".to_string()) })?); } paths.push(cwd.join(&file)); @@ -602,7 +599,7 @@ pub fn parse_dep_info(dep_info: &Path) -> CargoResult>> { fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult> { if let Some(paths) = parse_dep_info(dep_info)? { - Ok(mtime_if_fresh(&dep_info, paths.iter())) + Ok(mtime_if_fresh(dep_info, paths.iter())) } else { Ok(None) } diff --git a/src/cargo/ops/cargo_rustc/layout.rs b/src/cargo/ops/cargo_rustc/layout.rs index a3570042e..f090cbc72 100644 --- a/src/cargo/ops/cargo_rustc/layout.rs +++ b/src/cargo/ops/cargo_rustc/layout.rs @@ -76,7 +76,7 @@ impl Layout { // the target triple as a Path and then just use the file stem as the // component for the directory name. 
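// Reviewer note: the parse_dep_info() hunk above replaces a
// `loop { match deps.next() { Some(s) => ..., None => break } }` with
// `while let Some(s) = deps.next()`, which states "run until the iterator is
// exhausted" directly and drops the empty `None => break` arm.
// A minimal, self-contained sketch of the same transformation; the
// `collect_words` helper is illustrative only, not part of Cargo. (The real
// code keeps `while let` rather than a plain `for` loop because it also calls
// `deps.next()` inside the body to handle `\`-continued entries.)

fn collect_words(input: &str) -> Vec<String> {
    let mut words = Vec::new();
    let mut parts = input.split(' ').filter(|s| !s.is_empty());

    // Old shape:
    // loop {
    //     let word = match parts.next() {
    //         Some(s) => s.to_string(),
    //         None => break,
    //     };
    //     words.push(word);
    // }

    // New shape:
    while let Some(s) = parts.next() {
        words.push(s.to_string());
    }
    words
}

fn main() {
    assert_eq!(collect_words("a  b c"), vec!["a", "b", "c"]);
}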
if let Some(triple) = triple { - path.push(Path::new(triple).file_stem().ok_or(human(format!("target was empty")))?); + path.push(Path::new(triple).file_stem().ok_or(human("target was empty".to_string()))?); } path.push(dest); Layout::at(ws.config(), path) diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs index 7b7ed1e76..a8cfa13b7 100644 --- a/src/cargo/ops/cargo_rustc/mod.rs +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -170,7 +170,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, let pkgid = unit.pkg.package_id(); if !unit.target.is_lib() { continue } if unit.profile.doc { continue } - if cx.compilation.libraries.contains_key(&pkgid) { + if cx.compilation.libraries.contains_key(pkgid) { continue } @@ -182,9 +182,9 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, } } - if let Some(feats) = cx.resolve.features(&unit.pkg.package_id()) { + if let Some(feats) = cx.resolve.features(unit.pkg.package_id()) { cx.compilation.cfgs.entry(unit.pkg.package_id().clone()) - .or_insert(HashSet::new()) + .or_insert_with(HashSet::new) .extend(feats.iter().map(|feat| format!("feature=\"{}\"", feat))); } @@ -193,7 +193,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() { cx.compilation.cfgs.entry(pkg.clone()) - .or_insert(HashSet::new()) + .or_insert_with(HashSet::new) .extend(output.cfgs.iter().cloned()); for dir in output.library_paths.iter() { @@ -344,7 +344,7 @@ fn rustc(cx: &mut Context, unit: &Unit, exec: Arc) -> CargoResult>(path: P, basedir: Option<&str>) -> CargoResul fn add_deps_for_unit<'a, 'b>(deps: &mut HashSet, context: &mut Context<'a, 'b>, unit: &Unit<'a>, visited: &mut HashSet>) -> CargoResult<()> { - if !visited.insert(unit.clone()) { + if !visited.insert(*unit) { return Ok(()); } @@ -76,13 +76,10 @@ pub fn output_depinfo<'a, 'b>(context: &mut Context<'a, 'b>, unit: &Unit<'a>) -> // dep-info generation failed, so delete output file. This will usually // cause the build system to always rerun the build rule, which is correct // if inefficient. 
- match fs::remove_file(output_path) { - Err(err) => { - if err.kind() != ErrorKind::NotFound { - return Err(err.into()); - } + if let Err(err) = fs::remove_file(output_path) { + if err.kind() != ErrorKind::NotFound { + return Err(err.into()); } - _ => () } } } diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs index a7a4504d3..145559248 100644 --- a/src/cargo/ops/cargo_test.rs +++ b/src/cargo/ops/cargo_test.rs @@ -85,7 +85,7 @@ fn run_unit_tests(options: &TestOptions, let mut errors = Vec::new(); for &(ref pkg, _, ref exe) in &compilation.tests { - let to_display = match util::without_prefix(exe, &cwd) { + let to_display = match util::without_prefix(exe, cwd) { Some(path) => path, None => &**exe, }; @@ -145,7 +145,7 @@ fn run_doc_tests(options: &TestOptions, p.arg("--test-args").arg(arg); } - if let Some(cfgs) = compilation.cfgs.get(&package.package_id()) { + if let Some(cfgs) = compilation.cfgs.get(package.package_id()) { for cfg in cfgs.iter() { p.arg("--cfg").arg(cfg); } diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs index 30eec985a..6b055dfbb 100644 --- a/src/cargo/ops/lockfile.rs +++ b/src/cargo/ops/lockfile.rs @@ -67,7 +67,7 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> emit_package(root.as_table().unwrap(), &mut out); } - let deps = e.toml.get(&"package".to_string()).unwrap().as_slice().unwrap(); + let deps = e.toml[&"package".to_string()].as_slice().unwrap(); for dep in deps.iter() { let dep = dep.as_table().unwrap(); @@ -75,12 +75,9 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> emit_package(dep, &mut out); } - match e.toml.get(&"metadata".to_string()) { - Some(metadata) => { - out.push_str("[metadata]\n"); - out.push_str(&metadata.to_string()); - } - None => {} + if let Some(metadata) = e.toml.get(&"metadata".to_string()) { + out.push_str("[metadata]\n"); + out.push_str(&metadata.to_string()); } // If the lockfile contents haven't changed so don't rewrite it. This is @@ -128,8 +125,8 @@ fn emit_package(dep: &toml::Table, out: &mut String) { out.push_str(&format!("source = {}\n", lookup(dep, "source"))); } - if let Some(ref s) = dep.get("dependencies") { - let slice = Value::as_slice(*s).unwrap(); + if let Some(s) = dep.get("dependencies") { + let slice = Value::as_slice(s).unwrap(); if !slice.is_empty() { out.push_str("dependencies = [\n"); diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index 447c19f28..00cc2fb9c 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -51,7 +51,7 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> { let (mut registry, reg_id) = registry(opts.config, opts.token.clone(), opts.index.clone())?; - verify_dependencies(&pkg, ®_id)?; + verify_dependencies(pkg, ®_id)?; // Prepare a tarball, with a non-surpressable warning if metadata // is missing since this is being put online. 
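// Reviewer note: the dominant cleanup in this patch is collapsing a `match`
// on an `Option` whose `None` arm does nothing into `if let Some(..)`, as in
// the `fs::remove_file` hunk above and the `http.proxy`, `http.timeout`, and
// owner-modification hunks below. A minimal before/after sketch, assuming a
// hypothetical `lookup` helper (not part of Cargo):

fn lookup(key: &str) -> Option<i64> {
    if key == "http.timeout" { Some(30) } else { None }
}

fn timeout_old(key: &str) -> Option<i64> {
    match lookup(key) {
        Some(v) => return Some(v * 2),
        None => {}
    }
    None
}

fn timeout_new(key: &str) -> Option<i64> {
    // Same behaviour, one level of nesting less and no empty `None` arm.
    if let Some(v) = lookup(key) {
        return Some(v * 2);
    }
    None
}

fn main() {
    assert_eq!(timeout_old("http.timeout"), timeout_new("http.timeout"));
    assert_eq!(timeout_old("other"), timeout_new("other"));
}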
@@ -66,7 +66,7 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> { // Upload said tarball to the specified destination opts.config.shell().status("Uploading", pkg.package_id().to_string())?; - transmit(opts.config, &pkg, tarball.file(), &mut registry, opts.dry_run)?; + transmit(opts.config, pkg, tarball.file(), &mut registry, opts.dry_run)?; Ok(()) } @@ -121,13 +121,10 @@ fn transmit(config: &Config, Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?), None => None, }; - match *license_file { - Some(ref file) => { - if fs::metadata(&pkg.root().join(file)).is_err() { - bail!("the license file `{}` does not exist", file) - } + if let Some(ref file) = *license_file { + if fs::metadata(&pkg.root().join(file)).is_err() { + bail!("the license file `{}` does not exist", file) } - None => {} } // Do not upload if performing a dry run @@ -246,18 +243,13 @@ pub fn http_handle(config: &Config) -> CargoResult { /// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified /// via environment variables are picked up by libcurl. fn http_proxy(config: &Config) -> CargoResult> { - match config.get_string("http.proxy")? { - Some(s) => return Ok(Some(s.val)), - None => {} + if let Some(s) = config.get_string("http.proxy")? { + return Ok(Some(s.val)) } - match git2::Config::open_default() { - Ok(cfg) => { - match cfg.get_str("http.proxy") { - Ok(s) => return Ok(Some(s.to_string())), - Err(..) => {} - } + if let Ok(cfg) = git2::Config::open_default() { + if let Ok(s) = cfg.get_str("http.proxy") { + return Ok(Some(s.to_string())) } - Err(..) => {} } Ok(None) } @@ -282,9 +274,8 @@ pub fn http_proxy_exists(config: &Config) -> CargoResult { } pub fn http_timeout(config: &Config) -> CargoResult> { - match config.get_i64("http.timeout")? { - Some(s) => return Ok(Some(s.val)), - None => {} + if let Some(s) = config.get_i64("http.timeout")? 
{ + return Ok(Some(s.val)) } Ok(env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok())) } @@ -293,11 +284,8 @@ pub fn registry_login(config: &Config, token: String) -> CargoResult<()> { let RegistryConfig { index, token: _ } = registry_configuration(config)?; let mut map = HashMap::new(); let p = config.cwd().to_path_buf(); - match index { - Some(index) => { - map.insert("index".to_string(), ConfigValue::String(index, p.clone())); - } - None => {} + if let Some(index) = index { + map.insert("index".to_string(), ConfigValue::String(index, p.clone())); } map.insert("token".to_string(), ConfigValue::String(token, p)); @@ -327,28 +315,22 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { let (mut registry, _) = registry(config, opts.token.clone(), opts.index.clone())?; - match opts.to_add { - Some(ref v) => { - let v = v.iter().map(|s| &s[..]).collect::>(); - config.shell().status("Owner", format!("adding {:?} to crate {}", - v, name))?; - registry.add_owners(&name, &v).map_err(|e| { - human(format!("failed to add owners to crate {}: {}", name, e)) - })?; - } - None => {} + if let Some(ref v) = opts.to_add { + let v = v.iter().map(|s| &s[..]).collect::>(); + config.shell().status("Owner", format!("adding {:?} to crate {}", + v, name))?; + registry.add_owners(&name, &v).map_err(|e| { + human(format!("failed to add owners to crate {}: {}", name, e)) + })?; } - match opts.to_remove { - Some(ref v) => { - let v = v.iter().map(|s| &s[..]).collect::>(); - config.shell().status("Owner", format!("removing {:?} from crate {}", - v, name))?; - registry.remove_owners(&name, &v).map_err(|e| { - human(format!("failed to remove owners from crate {}: {}", name, e)) - })?; - } - None => {} + if let Some(ref v) = opts.to_remove { + let v = v.iter().map(|s| &s[..]).collect::>(); + config.shell().status("Owner", format!("removing {:?} from crate {}", + v, name))?; + registry.remove_owners(&name, &v).map_err(|e| { + human(format!("failed to remove owners from crate {}: {}", name, e)) + })?; } if opts.list { diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 61f25d23d..c4caf63e0 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -61,7 +61,7 @@ pub fn resolve_ws_precisely<'a>(ws: &Workspace<'a>, let resolved_with_overrides = ops::resolve_with_previous(&mut registry, ws, method, Some(&resolve), None, - &specs)?; + specs)?; for &(ref replace_spec, _) in ws.root_replace() { if !resolved_with_overrides.replacements().keys().any(|r| replace_spec.matches(r)) { diff --git a/src/cargo/sources/directory.rs b/src/cargo/sources/directory.rs index 661cff6f9..c55fc3669 100644 --- a/src/cargo/sources/directory.rs +++ b/src/cargo/sources/directory.rs @@ -76,7 +76,7 @@ impl<'cfg> Source for DirectorySource<'cfg> { // crates and otherwise may conflict with a VCS // (rust-lang/cargo#3414). 
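// Reviewer note: several hunks switch single-character string patterns to
// `char` patterns: `starts_with('.')` in the directory-source filter just
// below, `ends_with('\\')` in fingerprint.rs, and `split('/')` in the tests.
// For a single character the two forms accept exactly the same inputs; the
// `char` form just skips the more general str-pattern machinery and can be
// slightly cheaper. A tiny standalone check of the equivalence and of the
// trailing-backslash handling (illustrative only, not Cargo code):

fn main() {
    for name in [".git", "lib.rs", ""] {
        assert_eq!(name.starts_with("."), name.starts_with('.'));
    }

    // dep-info escapes spaces as "\ ", so a trailing '\' means "join with the
    // next fragment": pop the backslash and put a space back, as
    // parse_dep_info does above.
    let mut file = String::from("foo.rs\\");
    assert!(file.ends_with('\\'));
    file.pop();
    file.push(' ');
    assert_eq!(file, "foo.rs ");
}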
if let Some(s) = path.file_name().and_then(|s| s.to_str()) { - if s.starts_with(".") { + if s.starts_with('.') { continue } } diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs index eb0725ce9..326fa94f2 100644 --- a/src/cargo/sources/git/source.rs +++ b/src/cargo/sources/git/source.rs @@ -146,7 +146,7 @@ impl<'cfg> Source for GitSource<'cfg> { trace!("updating git source `{:?}`", self.remote); - let repo = self.remote.checkout(&db_path, &self.config)?; + let repo = self.remote.checkout(&db_path, self.config)?; let rev = repo.rev_for(&self.reference)?; (repo, rev) } else { @@ -166,7 +166,7 @@ impl<'cfg> Source for GitSource<'cfg> { // in scope so the destructors here won't tamper with too much. // Checkout is immutable, so we don't need to protect it with a lock once // it is created. - repo.copy_to(actual_rev.clone(), &checkout_path, &self.config)?; + repo.copy_to(actual_rev.clone(), &checkout_path, self.config)?; let source_id = self.source_id.with_precise(Some(actual_rev.to_string())); let path_source = PathSource::new_recursive(&checkout_path, diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs index 82d06d317..c8676d655 100644 --- a/src/cargo/sources/git/utils.rs +++ b/src/cargo/sources/git/utils.rs @@ -120,13 +120,13 @@ impl GitRemote { pub fn checkout(&self, into: &Path, cargo_config: &Config) -> CargoResult { let repo = match git2::Repository::open(into) { Ok(repo) => { - self.fetch_into(&repo, &cargo_config).chain_error(|| { + self.fetch_into(&repo, cargo_config).chain_error(|| { human(format!("failed to fetch into {}", into.display())) })?; repo } Err(..) => { - self.clone_into(into, &cargo_config).chain_error(|| { + self.clone_into(into, cargo_config).chain_error(|| { human(format!("failed to clone into: {}", into.display())) })? } @@ -152,7 +152,7 @@ impl GitRemote { // Create a local anonymous remote in the repository to fetch the url let url = self.url.to_string(); let refspec = "refs/heads/*:refs/heads/*"; - fetch(dst, &url, refspec, &cargo_config) + fetch(dst, &url, refspec, cargo_config) } fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult { @@ -162,7 +162,7 @@ impl GitRemote { } fs::create_dir_all(dst)?; let repo = git2::Repository::init_bare(dst)?; - fetch(&repo, &url, "refs/heads/*:refs/heads/*", &cargo_config)?; + fetch(&repo, &url, "refs/heads/*:refs/heads/*", cargo_config)?; Ok(repo) } } @@ -178,7 +178,7 @@ impl GitDatabase { Ok(repo) => { let checkout = GitCheckout::new(dest, self, rev, repo); if !checkout.is_fresh() { - checkout.fetch(&cargo_config)?; + checkout.fetch(cargo_config)?; checkout.reset()?; assert!(checkout.is_fresh()); } @@ -186,7 +186,7 @@ impl GitDatabase { } Err(..) 
=> GitCheckout::clone_into(dest, self, rev)?, }; - checkout.update_submodules(&cargo_config).chain_error(|| { + checkout.update_submodules(cargo_config).chain_error(|| { internal("failed to update submodules") })?; Ok(checkout) @@ -297,7 +297,7 @@ impl<'a> GitCheckout<'a> { let url = self.database.path.to_url()?; let url = url.to_string(); let refspec = "refs/heads/*:refs/heads/*"; - fetch(&self.repo, &url, refspec, &cargo_config)?; + fetch(&self.repo, &url, refspec, cargo_config)?; Ok(()) } @@ -320,7 +320,7 @@ impl<'a> GitCheckout<'a> { } fn update_submodules(&self, cargo_config: &Config) -> CargoResult<()> { - return update_submodules(&self.repo, &cargo_config); + return update_submodules(&self.repo, cargo_config); fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> { info!("update submodules for: {:?}", repo.workdir().unwrap()); @@ -362,14 +362,14 @@ impl<'a> GitCheckout<'a> { // Fetch data from origin and reset to the head commit let refspec = "refs/heads/*:refs/heads/*"; - fetch(&repo, url, refspec, &cargo_config).chain_error(|| { + fetch(&repo, url, refspec, cargo_config).chain_error(|| { internal(format!("failed to fetch submodule `{}` from {}", child.name().unwrap_or(""), url)) })?; let obj = repo.find_object(head, None)?; repo.reset(&obj, git2::ResetType::Hard, None)?; - update_submodules(&repo, &cargo_config)?; + update_submodules(&repo, cargo_config)?; } Ok(()) } @@ -459,7 +459,7 @@ fn with_authentication(url: &str, cfg: &git2::Config, mut f: F) let username = username.unwrap(); debug_assert!(!ssh_username_requested); ssh_agent_attempts.push(username.to_string()); - return git2::Cred::ssh_key_from_agent(&username) + return git2::Cred::ssh_key_from_agent(username) } // Sometimes libgit2 will ask for a username/password in plaintext. 
This @@ -554,7 +554,7 @@ fn with_authentication(url: &str, cfg: &git2::Config, mut f: F) res.chain_error(|| { let mut msg = "failed to authenticate when downloading \ repository".to_string(); - if ssh_agent_attempts.len() > 0 { + if !ssh_agent_attempts.is_empty() { let names = ssh_agent_attempts.iter() .map(|s| format!("`{}`", s)) .collect::>() @@ -590,7 +590,7 @@ pub fn fetch(repo: &git2::Repository, cb.credentials(f); // Create a local anonymous remote in the repository to fetch the url - let mut remote = repo.remote_anonymous(&url)?; + let mut remote = repo.remote_anonymous(url)?; let mut opts = git2::FetchOptions::new(); opts.remote_callbacks(cb) .download_tags(git2::AutotagOption::All); @@ -610,7 +610,7 @@ pub fn clone(url: &str, target: &Path, config: &Config) -> CargoResult<()> { target.display())) })?; let refspec = "refs/heads/*:refs/heads/*"; - fetch(&repo, &url, refspec, &config).chain_error(||{ + fetch(&repo, url, refspec, &config).chain_error(||{ human(format!("failed to fecth `{}`", url)) })?; let reference = "HEAD"; diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index 035474b8a..77bf165eb 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -107,10 +107,10 @@ impl<'cfg> PathSource<'cfg> { .collect::, _>>()?; let mut filter = |p: &Path| { - let relative_path = util::without_prefix(p, &root).unwrap(); - include.iter().any(|p| p.matches_path(&relative_path)) || { + let relative_path = util::without_prefix(p, root).unwrap(); + include.iter().any(|p| p.matches_path(relative_path)) || { include.is_empty() && - !exclude.iter().any(|p| p.matches_path(&relative_path)) + !exclude.iter().any(|p| p.matches_path(relative_path)) } }; @@ -171,24 +171,24 @@ impl<'cfg> PathSource<'cfg> { let index_files = index.iter().map(|entry| { use libgit2_sys::GIT_FILEMODE_COMMIT; let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32; - (join(&root, &entry.path), Some(is_dir)) + (join(root, &entry.path), Some(is_dir)) }); let mut opts = git2::StatusOptions::new(); opts.include_untracked(true); - if let Some(suffix) = util::without_prefix(pkg_path, &root) { + if let Some(suffix) = util::without_prefix(pkg_path, root) { opts.pathspec(suffix); } let statuses = repo.statuses(Some(&mut opts))?; let untracked = statuses.iter().filter_map(|entry| { match entry.status() { - git2::STATUS_WT_NEW => Some((join(&root, entry.path_bytes()), None)), + git2::STATUS_WT_NEW => Some((join(root, entry.path_bytes()), None)), _ => None } }); let mut subpackages_found = Vec::new(); - 'outer: for (file_path, is_dir) in index_files.chain(untracked) { + for (file_path, is_dir) in index_files.chain(untracked) { let file_path = file_path?; // Filter out files blatantly outside this package. This is helped a @@ -229,7 +229,7 @@ impl<'cfg> PathSource<'cfg> { if is_dir.unwrap_or_else(|| file_path.is_dir()) { warn!(" found submodule {}", file_path.display()); - let rel = util::without_prefix(&file_path, &root).unwrap(); + let rel = util::without_prefix(&file_path, root).unwrap(); let rel = rel.to_str().chain_error(|| { human(format!("invalid utf-8 filename: {}", rel.display())) })?; diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs index 86c028029..fffdd5702 100644 --- a/src/cargo/sources/registry/index.rs +++ b/src/cargo/sources/registry/index.rs @@ -53,14 +53,14 @@ impl<'cfg> RegistryIndex<'cfg> { /// specified. 
pub fn summaries(&mut self, name: &str) -> CargoResult<&Vec<(Summary, bool)>> { if self.cache.contains_key(name) { - return Ok(self.cache.get(name).unwrap()); + return Ok(&self.cache[name]); } let summaries = self.load_summaries(name)?; let summaries = summaries.into_iter().filter(|summary| { summary.0.package_id().name() == name }).collect(); self.cache.insert(name.to_string(), summaries); - Ok(self.cache.get(name).unwrap()) + Ok(&self.cache[name]) } fn load_summaries(&mut self, name: &str) -> CargoResult> { @@ -96,7 +96,7 @@ impl<'cfg> RegistryIndex<'cfg> { let mut contents = String::new(); f.read_to_string(&mut contents)?; let ret: CargoResult>; - ret = contents.lines().filter(|l| l.trim().len() > 0) + ret = contents.lines().filter(|l| !l.trim().is_empty()) .map(|l| self.parse_registry_package(l)) .collect(); ret.chain_error(|| { diff --git a/src/cargo/sources/registry/remote.rs b/src/cargo/sources/registry/remote.rs index 2b9b1ca02..e3c9b9c7b 100644 --- a/src/cargo/sources/registry/remote.rs +++ b/src/cargo/sources/registry/remote.rs @@ -88,7 +88,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { }; debug!("attempting github fast path for {}", self.source_id.url()); - if github_up_to_date(handle, &self.source_id.url(), &oid) { + if github_up_to_date(handle, self.source_id.url(), &oid) { return Ok(()) } debug!("fast path failed, falling back to a git fetch"); @@ -99,7 +99,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { let url = self.source_id.url().to_string(); let refspec = "refs/heads/*:refs/remotes/origin/*"; - git::fetch(&repo, &url, refspec, &self.config).chain_error(|| { + git::fetch(&repo, &url, refspec, self.config).chain_error(|| { human(format!("failed to fetch `{}`", url)) })?; diff --git a/src/cargo/sources/replaced.rs b/src/cargo/sources/replaced.rs index ddf2085eb..1e682b355 100644 --- a/src/cargo/sources/replaced.rs +++ b/src/cargo/sources/replaced.rs @@ -54,7 +54,7 @@ impl<'cfg> Source for ReplacedSource<'cfg> { } fn fingerprint(&self, id: &Package) -> CargoResult { - self.inner.fingerprint(&id) + self.inner.fingerprint(id) } fn verify(&self, id: &PackageId) -> CargoResult<()> { diff --git a/src/cargo/util/config.rs b/src/cargo/util/config.rs index 501480e24..abee23817 100644 --- a/src/cargo/util/config.rs +++ b/src/cargo/util/config.rs @@ -193,7 +193,7 @@ impl Config { } pub fn get_path(&self, key: &str) -> CargoResult>> { - if let Some(val) = self.get_string(&key)? { + if let Some(val) = self.get_string(key)? 
{ let is_path = val.val.contains('/') || (cfg!(windows) && val.val.contains('\\')); let path = if is_path { @@ -373,13 +373,13 @@ impl Config { let mut contents = String::new(); file.read_to_string(&mut contents)?; let table = cargo_toml::parse(&contents, - &path, + path, self).chain_error(|| { human(format!("could not parse TOML configuration in `{}`", path.display())) })?; let toml = toml::Value::Table(table); - let value = CV::from_toml(&path, toml).chain_error(|| { + let value = CV::from_toml(path, toml).chain_error(|| { human(format!("failed to load TOML configuration from `{}`", path.display())) })?; diff --git a/src/cargo/util/dependency_queue.rs b/src/cargo/util/dependency_queue.rs index 5165adc24..1514585d9 100644 --- a/src/cargo/util/dependency_queue.rs +++ b/src/cargo/util/dependency_queue.rs @@ -85,7 +85,7 @@ impl DependencyQueue { for dep in dependencies { assert!(my_dependencies.insert(dep.clone())); let rev = self.reverse_dep_map.entry(dep.clone()) - .or_insert(HashSet::new()); + .or_insert_with(HashSet::new); assert!(rev.insert(key.clone())); } &mut slot.insert((my_dependencies, value)).1 diff --git a/src/cargo/util/errors.rs b/src/cargo/util/errors.rs index 581c98c68..c7babd0b7 100644 --- a/src/cargo/util/errors.rs +++ b/src/cargo/util/errors.rs @@ -396,14 +396,14 @@ pub fn process_error(msg: &str, if let Some(out) = output { match str::from_utf8(&out.stdout) { - Ok(s) if s.trim().len() > 0 => { + Ok(s) if !s.trim().is_empty() => { desc.push_str("\n--- stdout\n"); desc.push_str(s); } Ok(..) | Err(..) => {} } match str::from_utf8(&out.stderr) { - Ok(s) if s.trim().len() > 0 => { + Ok(s) if !s.trim().is_empty() => { desc.push_str("\n--- stderr\n"); desc.push_str(s); } diff --git a/src/cargo/util/important_paths.rs b/src/cargo/util/important_paths.rs index 97e7eed1c..35161eaae 100644 --- a/src/cargo/util/important_paths.rs +++ b/src/cargo/util/important_paths.rs @@ -48,7 +48,7 @@ pub fn find_root_manifest_for_wd(manifest_path: Option, cwd: &Path) } Ok(absolute_path) }, - None => find_project_manifest(&cwd, "Cargo.toml"), + None => find_project_manifest(cwd, "Cargo.toml"), } } diff --git a/src/cargo/util/lazy_cell.rs b/src/cargo/util/lazy_cell.rs index fc751dc3c..2d42a50ec 100644 --- a/src/cargo/util/lazy_cell.rs +++ b/src/cargo/util/lazy_cell.rs @@ -58,7 +58,7 @@ impl LazyCell { where F: FnOnce() -> Result { if self.borrow().is_none() { - if let Err(_) = self.fill(init()?) 
{ + if self.fill(init()?).is_err() { unreachable!(); } } diff --git a/src/cargo/util/template.rs b/src/cargo/util/template.rs index e15b3bda6..4ae1b9389 100644 --- a/src/cargo/util/template.rs +++ b/src/cargo/util/template.rs @@ -20,7 +20,7 @@ pub fn toml_escape_helper(h: &Helper, if let Some(param) = h.param(0) { let txt = param.value().as_string().unwrap_or("").to_owned(); let rendered = format!("{}", toml::Value::String(txt)); - try!(rc.writer.write(rendered.into_bytes().as_ref())); + try!(rc.writer.write_all(rendered.into_bytes().as_ref())); } Ok(()) } @@ -31,7 +31,7 @@ pub fn html_escape_helper(h: &Helper, rc: &mut RenderContext) -> Result<(), RenderError> { if let Some(param) = h.param(0) { let rendered = html_escape(param.value().as_string().unwrap_or("")); - try!(rc.writer.write(rendered.into_bytes().as_ref())); + try!(rc.writer.write_all(rendered.into_bytes().as_ref())); } Ok(()) } @@ -139,7 +139,7 @@ pub fn get_template_type<'a>(repo: Option<&'a str>, subdir: Option<&'a str>) -> CargoResult { match (repo, subdir) { (Some(repo_str), _) => { - if let Ok(repo_url) = Url::parse(&repo_str) { + if let Ok(repo_url) = Url::parse(repo_str) { let supported_schemes = ["git", "file", "http", "https", "ssh"]; if supported_schemes.contains(&repo_url.scheme()) { Ok(TemplateType::GitRepo(repo_url.into_string())) diff --git a/src/cargo/util/toml.rs b/src/cargo/util/toml.rs index c9ee9b245..67460fad2 100644 --- a/src/cargo/util/toml.rs +++ b/src/cargo/util/toml.rs @@ -72,25 +72,23 @@ fn try_add_file(files: &mut Vec, file: PathBuf) { } } fn try_add_files(files: &mut Vec, root: PathBuf) { - match fs::read_dir(&root) { - Ok(new) => { - files.extend(new.filter_map(|dir| { - dir.map(|d| d.path()).ok() - }).filter(|f| { - f.extension().and_then(|s| s.to_str()) == Some("rs") - }).filter(|f| { - // Some unix editors may create "dotfiles" next to original - // source files while they're being edited, but these files are - // rarely actually valid Rust source files and sometimes aren't - // even valid UTF-8. Here we just ignore all of them and require - // that they are explicitly specified in Cargo.toml if desired. - f.file_name().and_then(|s| s.to_str()).map(|s| { - !s.starts_with('.') - }).unwrap_or(true) - })) - } - Err(_) => {/* just don't add anything if the directory doesn't exist, etc. */} + if let Ok(new) = fs::read_dir(&root) { + files.extend(new.filter_map(|dir| { + dir.map(|d| d.path()).ok() + }).filter(|f| { + f.extension().and_then(|s| s.to_str()) == Some("rs") + }).filter(|f| { + // Some unix editors may create "dotfiles" next to original + // source files while they're being edited, but these files are + // rarely actually valid Rust source files and sometimes aren't + // even valid UTF-8. Here we just ignore all of them and require + // that they are explicitly specified in Cargo.toml if desired. + f.file_name().and_then(|s| s.to_str()).map(|s| { + !s.starts_with('.') + }).unwrap_or(true) + })) } + /* else just don't add anything if the directory doesn't exist, etc. 
*/ } pub fn to_manifest(contents: &str, @@ -156,7 +154,7 @@ pub fn to_manifest(contents: &str, pub fn parse(toml: &str, file: &Path, config: &Config) -> CargoResult { - let mut first_parser = toml::Parser::new(&toml); + let mut first_parser = toml::Parser::new(toml); if let Some(toml) = first_parser.parse() { return Ok(toml); } @@ -176,7 +174,7 @@ in the future.", file.display()); return Ok(toml) } - let mut error_str = format!("could not parse input as TOML\n"); + let mut error_str = "could not parse input as TOML\n".to_string(); for error in first_parser.errors.iter() { let (loline, locol) = first_parser.to_linecol(error.lo); let (hiline, hicol) = first_parser.to_linecol(error.hi); @@ -454,8 +452,8 @@ impl TomlManifest { Some( TomlTarget { name: lib.name.clone().or(Some(project.name.clone())), - path: lib.path.clone().or( - layout.lib.as_ref().map(|p| PathValue::Path(p.clone())) + path: lib.path.clone().or_else( + || layout.lib.as_ref().map(|p| PathValue::Path(p.clone())) ), ..lib.clone() } @@ -567,7 +565,7 @@ impl TomlManifest { config: config, warnings: &mut warnings, platform: None, - layout: &layout, + layout: layout, }; fn process_dependencies( @@ -577,7 +575,7 @@ impl TomlManifest { -> CargoResult<()> { let dependencies = match new_deps { - Some(ref dependencies) => dependencies, + Some(dependencies) => dependencies, None => return Ok(()) }; for (n, v) in dependencies.iter() { @@ -625,7 +623,7 @@ impl TomlManifest { let exclude = project.exclude.clone().unwrap_or(Vec::new()); let include = project.include.clone().unwrap_or(Vec::new()); - let summary = Summary::new(pkgid, deps, self.features.clone() .unwrap_or(HashMap::new()))?; + let summary = Summary::new(pkgid, deps, self.features.clone() .unwrap_or_else(HashMap::new))?; let metadata = ManifestMetadata { description: project.description.clone(), homepage: project.homepage.clone(), @@ -780,8 +778,7 @@ impl TomlManifest { // If there is a build.rs file next to the Cargo.toml, assume it is // a build script Ok(ref e) if e.is_file() => Some(build_rs.into()), - Ok(_) => None, - Err(_) => None, + Ok(_) | Err(_) => None, } } } @@ -1099,8 +1096,8 @@ fn normalize(package_root: &Path, } let lib_target = |dst: &mut Vec, l: &TomlLibTarget| { - let path = l.path.clone().unwrap_or( - PathValue::Path(Path::new("src").join(&format!("{}.rs", l.name()))) + let path = l.path.clone().unwrap_or_else( + || PathValue::Path(Path::new("src").join(&format!("{}.rs", l.name()))) ); let crate_types = match l.crate_type.clone() { Some(kinds) => kinds.iter().map(|s| LibKind::from_str(s)).collect(), @@ -1122,9 +1119,10 @@ fn normalize(package_root: &Path, for bin in bins.iter() { let path = bin.path.clone().unwrap_or_else(|| { let default_bin_path = PathValue::Path(default(bin)); - match package_root.join(default_bin_path.to_path()).exists() { - true => default_bin_path, // inferred from bin's name - false => PathValue::Path(Path::new("src").join("main.rs")) + if package_root.join(default_bin_path.to_path()).exists() { + default_bin_path // inferred from bin's name + } else { + PathValue::Path(Path::new("src").join("main.rs")) } }); let mut target = Target::bin_target(&bin.name(), package_root.join(path.to_path()), diff --git a/tests/bad-manifest-path.rs b/tests/bad-manifest-path.rs index cd9a97ef5..90ba83bfc 100644 --- a/tests/bad-manifest-path.rs +++ b/tests/bad-manifest-path.rs @@ -21,7 +21,7 @@ fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) { fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) { let p = 
project("foo"); let expected_path = manifest_path_argument - .split("/").collect::>().join("[..]"); + .split('/').collect::>().join("[..]"); assert_that(p.cargo_process(command) .arg("--manifest-path").arg(manifest_path_argument) diff --git a/tests/cargo.rs b/tests/cargo.rs index 267c6199a..b8f87325b 100644 --- a/tests/cargo.rs +++ b/tests/cargo.rs @@ -64,7 +64,7 @@ fn path() -> Vec { #[test] fn list_command_looks_at_path() { let proj = project("list-non-overlapping"); - let proj = fake_file(proj, &Path::new("path-test"), "cargo-1", FakeKind::Executable); + let proj = fake_file(proj, Path::new("path-test"), "cargo-1", FakeKind::Executable); let mut pr = cargo_process(); let mut path = path(); @@ -84,7 +84,7 @@ fn list_command_resolves_symlinks() { use cargotest::support::cargo_dir; let proj = project("list-non-overlapping"); - let proj = fake_file(proj, &Path::new("path-test"), "cargo-2", + let proj = fake_file(proj, Path::new("path-test"), "cargo-2", FakeKind::Symlink{target:&cargo_dir().join("cargo")}); let mut pr = cargo_process(); diff --git a/tests/cfg.rs b/tests/cfg.rs index 8050000f8..0ec073b17 100644 --- a/tests/cfg.rs +++ b/tests/cfg.rs @@ -205,7 +205,7 @@ fn works_through_the_registry() { .publish(); let p = project("a") - .file("Cargo.toml", &r#" + .file("Cargo.toml", r#" [package] name = "a" version = "0.0.1" @@ -263,7 +263,7 @@ fn ignore_version_from_other_platform() { #[test] fn bad_target_spec() { let p = project("a") - .file("Cargo.toml", &r#" + .file("Cargo.toml", r#" [package] name = "a" version = "0.0.1" @@ -289,7 +289,7 @@ Caused by: #[test] fn bad_target_spec2() { let p = project("a") - .file("Cargo.toml", &r#" + .file("Cargo.toml", r#" [package] name = "a" version = "0.0.1" diff --git a/tests/git.rs b/tests/git.rs index e1aff7240..29b100ed8 100644 --- a/tests/git.rs +++ b/tests/git.rs @@ -912,7 +912,7 @@ fn dep_with_changed_submodule() { let repo = git2::Repository::open(&git_project.root()).unwrap(); let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), - &Path::new("src")); + Path::new("src")); git::commit(&repo); let project = project @@ -1651,7 +1651,7 @@ fn dont_require_submodules_are_checked_out() { let repo = git2::Repository::open(&git1.root()).unwrap(); let url = path2url(git2.root()).to_string(); - git::add_submodule(&repo, &url, &Path::new("a/submodule")); + git::add_submodule(&repo, &url, Path::new("a/submodule")); git::commit(&repo); git2::Repository::init(&project.root()).unwrap(); diff --git a/tests/init.rs b/tests/init.rs index cd6411892..c13d57d73 100644 --- a/tests/init.rs +++ b/tests/init.rs @@ -15,7 +15,7 @@ use tempdir::TempDir; fn cargo_process(s: &str) -> ProcessBuilder { let mut p = cargotest::process(&cargo_dir().join("cargo")); p.arg(s).cwd(&paths::root()).env("HOME", &paths::home()); - return p; + p } #[test] @@ -57,7 +57,7 @@ fn simple_bin() { #[test] fn both_lib_and_bin() { let td = TempDir::new("cargo").unwrap(); - assert_that(cargo_process("init").arg("--lib").arg("--bin").cwd(td.path().clone()) + assert_that(cargo_process("init").arg("--lib").arg("--bin").cwd(td.path()) .env("USER", "foo"), execs().with_status(101).with_stderr( "[ERROR] can't specify both lib and binary outputs")); @@ -427,7 +427,7 @@ Usage: fn no_filename() { assert_that(cargo_process("init").arg("/"), execs().with_status(101) - .with_stderr(&format!("\ + .with_stderr("\ [ERROR] cannot auto-detect project name from path \"/\" ; use --name to override -"))); +".to_string())); } diff --git a/tests/install.rs b/tests/install.rs index 
cc457eb33..22f9af060 100644 --- a/tests/install.rs +++ b/tests/install.rs @@ -16,7 +16,7 @@ use hamcrest::{assert_that, is_not}; fn cargo_process(s: &str) -> ProcessBuilder { let mut p = cargotest::cargo_process(); p.arg(s); - return p + p } fn pkg(name: &str, vers: &str) { diff --git a/tests/new.rs b/tests/new.rs index 7809f4892..2e9a770b0 100644 --- a/tests/new.rs +++ b/tests/new.rs @@ -16,7 +16,7 @@ use tempdir::TempDir; fn cargo_process(s: &str) -> ProcessBuilder { let mut p = cargotest::cargo_process(); p.arg(s); - return p; + p } #[test] @@ -127,7 +127,7 @@ authors = ["{{author}}"] #[test] fn both_lib_and_bin() { let td = TempDir::new("cargo").unwrap(); - assert_that(cargo_process("new").arg("--lib").arg("--bin").arg("foo").cwd(td.path().clone()) + assert_that(cargo_process("new").arg("--lib").arg("--bin").arg("foo").cwd(td.path()) .env("USER", "foo"), execs().with_status(101).with_stderr( "[ERROR] can't specify both lib and binary outputs")); @@ -136,7 +136,7 @@ fn both_lib_and_bin() { #[test] fn simple_git() { let td = TempDir::new("cargo").unwrap(); - assert_that(cargo_process("new").arg("--lib").arg("foo").cwd(td.path().clone()) + assert_that(cargo_process("new").arg("--lib").arg("foo").cwd(td.path()) .env("USER", "foo"), execs().with_status(0)); @@ -146,7 +146,7 @@ fn simple_git() { assert_that(&td.path().join("foo/.git"), existing_dir()); assert_that(&td.path().join("foo/.gitignore"), existing_file()); - assert_that(cargo_process("build").cwd(&td.path().clone().join("foo")), + assert_that(cargo_process("build").cwd(&td.path().join("foo")), execs().with_status(0)); } @@ -237,7 +237,7 @@ fn finds_author_user() { // the hierarchy let td = TempDir::new("cargo").unwrap(); assert_that(cargo_process("new").arg("foo").env("USER", "foo") - .cwd(td.path().clone()), + .cwd(td.path()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); @@ -252,7 +252,7 @@ fn finds_author_user_escaped() { // the hierarchy let td = TempDir::new("cargo").unwrap(); assert_that(cargo_process("new").arg("foo").env("USER", "foo \"bar\"") - .cwd(td.path().clone()), + .cwd(td.path()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); @@ -269,7 +269,7 @@ fn finds_author_username() { assert_that(cargo_process("new").arg("foo") .env_remove("USER") .env("USERNAME", "foo") - .cwd(td.path().clone()), + .cwd(td.path()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); @@ -288,7 +288,7 @@ fn finds_author_priority() { .env("EMAIL", "baz2") .env("CARGO_NAME", "bar") .env("CARGO_EMAIL", "baz") - .cwd(td.path().clone()), + .cwd(td.path()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); @@ -305,7 +305,7 @@ fn finds_author_email() { assert_that(cargo_process("new").arg("foo") .env("USER", "bar") .env("EMAIL", "baz") - .cwd(td.path().clone()), + .cwd(td.path()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); @@ -335,7 +335,7 @@ fn finds_git_email() { assert_that(cargo_process("new").arg("foo") .env("GIT_AUTHOR_NAME", "foo") .env("GIT_AUTHOR_EMAIL", "gitfoo") - .cwd(td.path().clone()), + .cwd(td.path()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); @@ -353,7 +353,7 @@ fn finds_git_author() { assert_that(cargo_process("new").arg("foo") .env_remove("USER") .env("GIT_COMMITTER_NAME", "gitfoo") - .cwd(td.path().clone()), + .cwd(td.path()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); diff --git a/tests/package.rs b/tests/package.rs index 0e03df5cf..eee1f588f 100644 --- 
a/tests/package.rs +++ b/tests/package.rs @@ -360,9 +360,9 @@ fn no_duplicates_from_modified_tracked_files() { fn main() {} "#); p.build(); - File::create(p.root().join("src/main.rs")).unwrap().write_all(r#" + File::create(p.root().join("src/main.rs")).unwrap().write_all(br#" fn main() { println!("A change!"); } - "#.as_bytes()).unwrap(); + "#).unwrap(); let mut cargo = cargo_process(); cargo.cwd(p.root()); assert_that(cargo.clone().arg("build"), execs().with_status(0)); @@ -476,9 +476,9 @@ fn repackage_on_source_change() { panic!("could not create file {}: {}", p.root().join("src/foo.rs").display(), e) }); - file.write_all(r#" + file.write_all(br#" fn main() { println!("foo"); } - "#.as_bytes()).unwrap(); + "#).unwrap(); std::mem::drop(file); let mut pro = process(&cargo_dir().join("cargo")); diff --git a/tests/publish.rs b/tests/publish.rs index fd3558a4e..cb716cfe8 100644 --- a/tests/publish.rs +++ b/tests/publish.rs @@ -26,10 +26,10 @@ fn upload() -> Url { Url::from_file_path(&*upload_path()).ok().unwrap() } fn setup() { let config = paths::root().join(".cargo/config"); t!(fs::create_dir_all(config.parent().unwrap())); - t!(t!(File::create(&config)).write_all(&format!(r#" + t!(t!(File::create(&config)).write_all(br#" [registry] token = "api-token" - "#).as_bytes())); + "#)); t!(fs::create_dir_all(&upload_path().join("api/v1/crates"))); repo(®istry_path()) diff --git a/tests/resolve.rs b/tests/resolve.rs index 3e697e56a..873220848 100644 --- a/tests/resolve.rs +++ b/tests/resolve.rs @@ -18,9 +18,7 @@ fn resolve(pkg: PackageId, deps: Vec, -> CargoResult> { let summary = Summary::new(pkg.clone(), deps, HashMap::new()).unwrap(); let method = Method::Everything; - Ok(resolver::resolve(&[(summary, method)], &[], registry)?.iter().map(|p| { - p.clone() - }).collect()) + Ok(resolver::resolve(&[(summary, method)], &[], registry)?.iter().cloned().collect()) } trait ToDep { diff --git a/tests/rustflags.rs b/tests/rustflags.rs index 2e1ce73ae..a03f75290 100644 --- a/tests/rustflags.rs +++ b/tests/rustflags.rs @@ -189,7 +189,7 @@ fn env_rustflags_normal_source_with_target() { #[bench] fn run1(_ben: &mut test::Bencher) { }"#); p.build(); - let ref host = rustc_host(); + let host = &rustc_host(); // Use RUSTFLAGS to pass an argument that will generate an error assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus")