Auto merge of #5110 - alexcrichton:reset-harder, r=matklad

Try a lot harder to recover corrupt git repos

We've historically received a lot of intermittent bug reports about corrupt git
repositories. These inevitably happen when Cargo is ctrl-c'd or interrupted for
whatever other reason, and to provide a better user experience Cargo strives to
handle these situations automatically by blowing away the old checkout for a
new update.

This commit adds a new test which pathologically corrupts a git database and
checkout in an attempt to expose bugs in Cargo. Sure enough, there were more
locations that needed to handle corrupt git checkouts gracefully. Notable
inclusions were:

* The `fetch` operation in libgit2 could fail due to corrupt references. This
  commit starts by adding an explicit whitelist of error classes coming out of
  `fetch` that trigger an auto-retry after blowing away the repository. We need
  to be very careful here, as network errors commonly come out of this function
  and we don't want to re-clone too aggressively. (A condensed sketch of this
  retry pattern follows the list below.)

* After a `fetch` succeeded, a repository could still fail to resolve a git
  reference to the actual revision we want. This indicated that we did indeed
  need to blow everything away and re-clone entirely.

* When creating a checkout from a database, the `reset` operation might fail
  due to a corrupt local database in the checkout itself. If this happens, we
  need to just blow it away and try again.

There are likely more situations lurking where we need to re-clone, but I
figure we can discover those over time.
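
As a concrete illustration of the first bullet, here is a condensed sketch of
the whitelist-and-retry control flow implemented by the `fetch` change in this
diff. It is a sketch, not the exact Cargo code: `fetch_with_recovery` is a
hypothetical name, the `reinitialize` helper (added later in this diff) is
passed as a closure purely to keep the example self-contained, and the real
implementation threads `FetchOptions` through `with_fetch_options`.

```rust
// Only `ErrorClass::Reference` failures are treated as "corrupt repository";
// network and other errors propagate unchanged, and we reinitialize at most
// once so a genuinely broken repo can't retry forever.
fn fetch_with_recovery<F>(
    repo: &mut git2::Repository,
    url: &str,
    refspec: &str,
    reinitialize: F, // stand-in for the `reinitialize` helper in this diff
) -> Result<(), git2::Error>
where
    F: Fn(&mut git2::Repository) -> Result<(), git2::Error>,
{
    let mut repo_reinitialized = false;
    loop {
        let err = match repo
            .remote_anonymous(url)
            .and_then(|mut remote| remote.fetch(&[refspec], None, None))
        {
            Ok(()) => return Ok(()),
            Err(e) => e,
        };
        if !repo_reinitialized && err.class() == git2::ErrorClass::Reference {
            // Whitelisted class: blow the repository away and retry once.
            repo_reinitialized = true;
            if reinitialize(repo).is_ok() {
                continue;
            }
        }
        return Err(err);
    }
}
```

The key design choice is the single retry: a repository that still fails after
reinitialization surfaces its real error rather than looping forever.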
bors committed 2018-03-02 23:37:29 +00:00 · commit 4429f4efa1
13 changed files with 398 additions and 110 deletions

View file

@@ -34,7 +34,7 @@ matrix:
script:
- cargo test
- cargo doc --no-deps
- (cd src/doc && mdbook build --no-create --dest-dir ../../target/doc)
- (cd src/doc && mdbook build --dest-dir ../../target/doc)
exclude:
- rust: stable

View file

@@ -5,6 +5,7 @@ use std::path::Path;
use core::{Profiles, Workspace};
use util::Config;
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
use ops::{self, Context, BuildConfig, Kind, Unit};
pub struct CleanOptions<'a> {
@@ -99,14 +100,14 @@ fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> {
let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
config.shell().verbose(|shell| {shell.status("Removing", path.display())})?;
fs::remove_dir_all(path).chain_err(|| {
paths::remove_dir_all(path).chain_err(|| {
format_err!("could not remove build directory")
})?;
} else if m.is_ok() {
config.shell().verbose(|shell| {shell.status("Removing", path.display())})?;
fs::remove_file(path).chain_err(|| {
paths::remove_file(path).chain_err(|| {
format_err!("failed to remove build artifact")
})?;
}
Ok(())
}
}

View file

@@ -17,6 +17,7 @@ use sources::{GitSource, PathSource, SourceConfigMap};
use util::{Config, internal};
use util::{Filesystem, FileLock};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
#[derive(Deserialize, Serialize)]
#[serde(untagged)]
@@ -47,7 +48,7 @@ impl Transaction {
impl Drop for Transaction {
fn drop(&mut self) {
for bin in self.bins.iter() {
let _ = fs::remove_file(bin);
let _ = paths::remove_file(bin);
}
}
}
@@ -319,7 +320,7 @@ fn install_one(root: &Filesystem,
// Don't bother grabbing a lock as we're going to blow it all away
// anyway.
let target_dir = ws.target_dir().into_path_unlocked();
fs::remove_dir_all(&target_dir)?;
paths::remove_dir_all(&target_dir)?;
}
Ok(())
@@ -667,7 +668,7 @@ pub fn uninstall_one(root: &Filesystem,
write_crate_list(&crate_metadata, metadata)?;
for bin in to_remove {
config.shell().status("Removing", bin.display())?;
fs::remove_file(bin)?;
paths::remove_file(bin)?;
}
Ok(())

View file

@@ -304,8 +304,8 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult
let f = GzDecoder::new(tar.file());
let dst = tar.parent().join(&format!("{}-{}", pkg.name(), pkg.version()));
if fs::metadata(&dst).is_ok() {
fs::remove_dir_all(&dst)?;
if dst.exists() {
paths::remove_dir_all(&dst)?;
}
let mut archive = Archive::new(f);
archive.unpack(dst.parent().unwrap())?;

View file

@@ -15,6 +15,7 @@ use core::manifest::Lto;
use core::shell::ColorChoice;
use util::{self, ProcessBuilder, machine_message};
use util::{Config, internal, profile, join_paths};
use util::paths;
use util::errors::{CargoResult, CargoResultExt, Internal};
use util::Freshness;
@@ -385,9 +386,7 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
if filename.extension() == Some(OsStr::new("rmeta")) {
let dst = root.join(filename).with_extension("rlib");
if dst.exists() {
fs::remove_file(&dst).chain_err(|| {
format!("Could not remove file: {}.", dst.display())
})?;
paths::remove_file(&dst)?;
}
}
}
@@ -541,9 +540,7 @@ fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
continue
}
if dst.exists() {
fs::remove_file(&dst).chain_err(|| {
format!("failed to remove: {}", dst.display())
})?;
paths::remove_file(&dst)?;
}
let link_result = if src.is_dir() {

View file

@@ -1,10 +1,11 @@
use std::collections::HashSet;
use std::io::{Write, BufWriter, ErrorKind};
use std::fs::{self, File};
use std::io::{Write, BufWriter};
use std::fs::File;
use std::path::{Path, PathBuf};
use ops::{Context, Unit};
use util::{CargoResult, internal};
use util::paths;
use ops::cargo_rustc::fingerprint;
fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResult<String> {
@@ -81,13 +82,12 @@ pub fn output_depinfo<'a, 'b>(context: &mut Context<'a, 'b>, unit: &Unit<'a>) ->
write!(outfile, " {}", render_filename(dep, basedir)?)?;
}
writeln!(outfile, "")?;
} else if let Err(err) = fs::remove_file(output_path) {
// dep-info generation failed, so delete output file. This will usually
// cause the build system to always rerun the build rule, which is correct
// if inefficient.
if err.kind() != ErrorKind::NotFound {
return Err(err.into());
}
// dep-info generation failed, so delete output file. This will
// usually cause the build system to always rerun the build
// rule, which is correct if inefficient.
} else if output_path.exists() {
paths::remove_file(output_path)?;
}
}
}

View file

@@ -6,7 +6,7 @@ use core::source::{Source, SourceId};
use core::GitReference;
use core::{Package, PackageId, Summary, Registry, Dependency};
use util::Config;
use util::errors::{CargoResult, Internal};
use util::errors::CargoResult;
use util::hex::short_hash;
use sources::PathSource;
use sources::git::utils::{GitRemote, GitRevision};
@@ -170,9 +170,7 @@ impl<'cfg> Source for GitSource<'cfg> {
trace!("updating git source `{:?}`", self.remote);
let repo = self.remote.checkout(&db_path, self.config)?;
let rev = repo.rev_for(&self.reference).map_err(Internal::new)?;
(repo, rev)
self.remote.checkout(&db_path, &self.reference, self.config)?
} else {
(self.remote.db_at(&db_path)?, actual_rev.unwrap())
};

View file

@@ -12,6 +12,7 @@ use url::Url;
use core::GitReference;
use util::{ToUrl, internal, Config, network, Progress};
use util::paths;
use util::errors::{CargoResult, CargoResultExt, CargoError};
#[derive(PartialEq, Clone, Debug)]
@@ -87,30 +88,40 @@ impl GitRemote {
pub fn rev_for(&self, path: &Path, reference: &GitReference)
-> CargoResult<GitRevision> {
let db = self.db_at(path)?;
db.rev_for(reference)
reference.resolve(&self.db_at(path)?.repo)
}
pub fn checkout(&self, into: &Path, cargo_config: &Config) -> CargoResult<GitDatabase> {
let repo = match git2::Repository::open(into) {
Ok(mut repo) => {
self.fetch_into(&mut repo, cargo_config).chain_err(|| {
format!("failed to fetch into {}", into.display())
})?;
repo
pub fn checkout(&self,
into: &Path,
reference: &GitReference,
cargo_config: &Config)
-> CargoResult<(GitDatabase, GitRevision)>
{
let mut repo_and_rev = None;
if let Ok(mut repo) = git2::Repository::open(into) {
self.fetch_into(&mut repo, cargo_config).chain_err(|| {
format!("failed to fetch into {}", into.display())
})?;
if let Ok(rev) = reference.resolve(&repo) {
repo_and_rev = Some((repo, rev));
}
Err(..) => {
self.clone_into(into, cargo_config).chain_err(|| {
}
let (repo, rev) = match repo_and_rev {
Some(pair) => pair,
None => {
let repo = self.clone_into(into, cargo_config).chain_err(|| {
format!("failed to clone into: {}", into.display())
})?
})?;
let rev = reference.resolve(&repo)?;
(repo, rev)
}
};
Ok(GitDatabase {
Ok((GitDatabase {
remote: self.clone(),
path: into.to_path_buf(),
repo,
})
}, rev))
}
pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
@@ -130,7 +141,7 @@ impl GitRemote {
fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult<git2::Repository> {
if fs::metadata(&dst).is_ok() {
fs::remove_dir_all(dst)?;
paths::remove_dir_all(dst)?;
}
fs::create_dir_all(dst)?;
let mut repo = git2::Repository::init_bare(dst)?;
@@ -142,56 +153,32 @@ impl GitRemote {
impl GitDatabase {
pub fn copy_to(&self, rev: GitRevision, dest: &Path, cargo_config: &Config)
-> CargoResult<GitCheckout> {
let checkout = match git2::Repository::open(dest) {
Ok(repo) => {
let mut checkout = GitCheckout::new(dest, self, rev, repo);
if !checkout.is_fresh() {
checkout.fetch(cargo_config)?;
checkout.reset(cargo_config)?;
assert!(checkout.is_fresh());
let mut checkout = None;
if let Ok(repo) = git2::Repository::open(dest) {
let mut co = GitCheckout::new(dest, self, rev.clone(), repo);
if !co.is_fresh() {
// After a successful fetch operation do a sanity check to
// ensure we've got the object in our database to reset to. This
// can fail sometimes for corrupt repositories where the fetch
// operation succeeds but the object isn't actually there.
co.fetch(cargo_config)?;
if co.has_object() {
co.reset(cargo_config)?;
assert!(co.is_fresh());
checkout = Some(co);
}
checkout
} else {
checkout = Some(co);
}
Err(..) => GitCheckout::clone_into(dest, self, rev, cargo_config)?,
};
let checkout = match checkout {
Some(c) => c,
None => GitCheckout::clone_into(dest, self, rev, cargo_config)?,
};
checkout.update_submodules(cargo_config)?;
Ok(checkout)
}
pub fn rev_for(&self, reference: &GitReference) -> CargoResult<GitRevision> {
let id = match *reference {
GitReference::Tag(ref s) => {
(|| -> CargoResult<git2::Oid> {
let refname = format!("refs/tags/{}", s);
let id = self.repo.refname_to_id(&refname)?;
let obj = self.repo.find_object(id, None)?;
let obj = obj.peel(ObjectType::Commit)?;
Ok(obj.id())
})().chain_err(|| {
format!("failed to find tag `{}`", s)
})?
}
GitReference::Branch(ref s) => {
(|| {
let b = self.repo.find_branch(s, git2::BranchType::Local)?;
b.get().target().ok_or_else(|| {
format_err!("branch `{}` did not have a target", s)
})
})().chain_err(|| {
format!("failed to find branch `{}`", s)
})?
}
GitReference::Rev(ref s) => {
let obj = self.repo.revparse_single(s)?;
match obj.as_tag() {
Some(tag) => tag.target_id(),
None => obj.id(),
}
}
};
Ok(GitRevision(id))
}
pub fn to_short_id(&self, revision: GitRevision) -> CargoResult<GitShortID> {
let obj = self.repo.find_object(revision.0, None)?;
Ok(GitShortID(obj.short_id()?))
@@ -203,6 +190,42 @@ impl GitDatabase {
}
}
impl GitReference {
fn resolve(&self, repo: &git2::Repository) -> CargoResult<GitRevision> {
let id = match *self {
GitReference::Tag(ref s) => {
(|| -> CargoResult<git2::Oid> {
let refname = format!("refs/tags/{}", s);
let id = repo.refname_to_id(&refname)?;
let obj = repo.find_object(id, None)?;
let obj = obj.peel(ObjectType::Commit)?;
Ok(obj.id())
})().chain_err(|| {
format!("failed to find tag `{}`", s)
})?
}
GitReference::Branch(ref s) => {
(|| {
let b = repo.find_branch(s, git2::BranchType::Local)?;
b.get().target().ok_or_else(|| {
format_err!("branch `{}` did not have a target", s)
})
})().chain_err(|| {
format!("failed to find branch `{}`", s)
})?
}
GitReference::Rev(ref s) => {
let obj = repo.revparse_single(s)?;
match obj.as_tag() {
Some(tag) => tag.target_id(),
None => obj.id(),
}
}
};
Ok(GitRevision(id))
}
}
impl<'a> GitCheckout<'a> {
fn new(path: &Path, database: &'a GitDatabase, revision: GitRevision,
repo: git2::Repository)
@@ -227,9 +250,7 @@ impl<'a> GitCheckout<'a> {
format!("Couldn't mkdir {}", dirname.display())
})?;
if into.exists() {
fs::remove_dir_all(into).chain_err(|| {
format!("Couldn't rmdir {}", into.display())
})?;
paths::remove_dir_all(into)?;
}
// we're doing a local filesystem-to-filesystem clone so there should
@@ -285,6 +306,10 @@ impl<'a> GitCheckout<'a> {
Ok(())
}
fn has_object(&self) -> bool {
self.repo.find_object(self.revision.0, None).is_ok()
}
fn reset(&self, config: &Config) -> CargoResult<()> {
// If we're interrupted while performing this reset (e.g. we die because
// of a signal) Cargo needs to be sure to try to check out this repo
@@ -295,7 +320,7 @@ impl<'a> GitCheckout<'a> {
// ready to go. Hence if we start to do a reset, we make sure this file
// *doesn't* exist, and then once we're done we create the file.
let ok_file = self.location.join(".cargo-ok");
let _ = fs::remove_file(&ok_file);
let _ = paths::remove_file(&ok_file);
info!("reset {} to {}", self.repo.path().display(), self.revision);
let object = self.repo.find_object(self.revision.0, None)?;
reset(&self.repo, &object, config)?;
@@ -351,7 +376,7 @@ impl<'a> GitCheckout<'a> {
}
Err(..) => {
let path = parent.workdir().unwrap().join(child.path());
let _ = fs::remove_dir_all(&path);
let _ = paths::remove_dir_all(&path);
git2::Repository::init(&path)?
}
};
@@ -644,10 +669,40 @@ pub fn fetch(repo: &mut git2::Repository,
maybe_gc_repo(repo)?;
debug!("doing a fetch for {}", url);
with_fetch_options(&repo.config()?, url, config, &mut |mut opts| {
debug!("initiating fetch of {} from {}", refspec, url);
let mut remote = repo.remote_anonymous(url.as_str())?;
remote.fetch(&[refspec], Some(&mut opts), None)?;
let git_config = git2::Config::open_default()?;
with_fetch_options(&git_config, url, config, &mut |mut opts| {
// The `fetch` operation here may fail spuriously due to a corrupt
// repository. It could also fail, however, for a whole slew of other
// reasons (aka network related reasons). We want Cargo to automatically
// recover from corrupt repositories, but we don't want Cargo to stomp
// over other legitimate errors.
//
// Consequently we save off the error of the `fetch` operation and if it
// looks like a "corrupt repo" error then we blow away the repo and try
// again. If it looks like any other kind of error, or if we've already
// blown away the repository, then we want to return the error as-is.
let mut repo_reinitialized = false;
loop {
debug!("initiating fetch of {} from {}", refspec, url);
let res = repo.remote_anonymous(url.as_str())?
.fetch(&[refspec], Some(&mut opts), None);
let err = match res {
Ok(()) => break,
Err(e) => e,
};
debug!("fetch failed: {}", err);
if !repo_reinitialized && err.class() == git2::ErrorClass::Reference {
repo_reinitialized = true;
debug!("looks like this is a corrupt repository, reinitializing \
and trying again");
if reinitialize(repo).is_ok() {
continue
}
}
return Err(err.into())
}
Ok(())
})
}
@@ -703,30 +758,33 @@ fn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> {
}
// Alright all else failed, let's start over.
//
reinitialize(repo)
}
fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> {
// Here we want to drop the current repository object pointed to by `repo`,
// so we initialize a temporary repository in a sub-folder, blow away the
// existing git folder, and then recreate the git repo. Finally we blow away
// the `tmp` folder we allocated.
let path = repo.path().to_path_buf();
debug!("reinitializing git repo at {:?}", path);
let tmp = path.join("tmp");
mem::replace(repo, git2::Repository::init(&tmp)?);
let bare = !repo.path().ends_with(".git");
*repo = git2::Repository::init(&tmp)?;
for entry in path.read_dir()? {
let entry = entry?;
if entry.file_name().to_str() == Some("tmp") {
continue
}
let path = entry.path();
drop(fs::remove_file(&path).or_else(|_| fs::remove_dir_all(&path)));
drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path)));
}
if repo.is_bare() {
mem::replace(repo, git2::Repository::init_bare(path)?);
if bare {
*repo = git2::Repository::init_bare(path)?;
} else {
mem::replace(repo, git2::Repository::init(path)?);
*repo = git2::Repository::init(path)?;
}
fs::remove_dir_all(&tmp).chain_err(|| {
format!("failed to remove {:?}", tmp)
})?;
paths::remove_dir_all(&tmp)?;
Ok(())
}

View file

@@ -9,6 +9,7 @@ use fs2::{FileExt, lock_contended_error};
use libc;
use util::Config;
use util::paths;
use util::errors::{CargoResult, CargoResultExt, CargoError};
pub struct FileLock {
@@ -49,7 +50,7 @@ impl FileLock {
///
/// This can be useful if a directory is locked with a sentinel file but it
/// needs to be cleared out as it may be corrupt.
pub fn remove_siblings(&self) -> io::Result<()> {
pub fn remove_siblings(&self) -> CargoResult<()> {
let path = self.path();
for entry in path.parent().unwrap().read_dir()? {
let entry = entry?;
@@ -58,9 +59,9 @@
}
let kind = entry.file_type()?;
if kind.is_dir() {
fs::remove_dir_all(entry.path())?;
paths::remove_dir_all(entry.path())?;
} else {
fs::remove_file(entry.path())?;
paths::remove_file(entry.path())?;
}
}
Ok(())

View file

@@ -1,7 +1,7 @@
use std::env;
use std::ffi::{OsStr, OsString};
use std::fs::File;
use std::fs::OpenOptions;
use std::fs::{self, File, OpenOptions};
use std::io;
use std::io::prelude::*;
use std::path::{Path, PathBuf, Component};
@@ -188,3 +188,72 @@ impl<'a> Iterator for PathAncestors<'a> {
}
}
}
pub fn remove_dir_all<P: AsRef<Path>>(p: P) -> CargoResult<()> {
_remove_dir_all(p.as_ref())
}
fn _remove_dir_all(p: &Path) -> CargoResult<()> {
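// If `p` is itself a symlink, remove the link rather than recursing into
// (and deleting) whatever it points at.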
if p.symlink_metadata()?.file_type().is_symlink() {
return remove_file(p)
}
let entries = p.read_dir().chain_err(|| {
format!("failed to read directory `{}`", p.display())
})?;
for entry in entries {
let entry = entry?;
let path = entry.path();
if entry.file_type()?.is_dir() {
remove_dir_all(&path)?;
} else {
remove_file(&path)?;
}
}
remove_dir(&p)
}
pub fn remove_dir<P: AsRef<Path>>(p: P) -> CargoResult<()> {
_remove_dir(p.as_ref())
}
fn _remove_dir(p: &Path) -> CargoResult<()> {
fs::remove_dir(p).chain_err(|| {
format!("failed to remove directory `{}`", p.display())
})?;
Ok(())
}
pub fn remove_file<P: AsRef<Path>>(p: P) -> CargoResult<()> {
_remove_file(p.as_ref())
}
fn _remove_file(p: &Path) -> CargoResult<()> {
let mut err = match fs::remove_file(p) {
Ok(()) => return Ok(()),
Err(e) => e,
};
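// Removing a read-only file fails with `PermissionDenied` (notably on
// Windows), so try clearing the read-only bit and removing it once more.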
if err.kind() == io::ErrorKind::PermissionDenied {
if set_not_readonly(p).unwrap_or(false) {
match fs::remove_file(p) {
Ok(()) => return Ok(()),
Err(e) => err = e,
}
}
}
Err(err).chain_err(|| {
format!("failed to remove file `{}`", p.display())
})?;
Ok(())
}
fn set_not_readonly(p: &Path) -> io::Result<bool> {
let mut perms = p.metadata()?.permissions();
if !perms.readonly() {
return Ok(false)
}
perms.set_readonly(false);
fs::set_permissions(p, perms)?;
Ok(true)
}

View file

@@ -0,0 +1,161 @@
use std::fs;
use std::path::{Path, PathBuf};
use cargo::util::paths as cargopaths;
use cargotest::support::paths;
use cargotest::support::{git, project, execs};
use hamcrest::assert_that;
#[test]
fn deleting_database_files() {
let project = project("foo");
let git_project = git::new("bar", |project| {
project
.file("Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = []
"#)
.file("src/lib.rs", "")
}).unwrap();
let project = project
.file("Cargo.toml", &format!(r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
[dependencies]
bar = {{ git = '{}' }}
"#, git_project.url()))
.file("src/lib.rs", "")
.build();
assert_that(project.cargo("build"), execs().with_status(0));
let mut files = Vec::new();
find_files(&paths::home().join(".cargo/git/db"), &mut files);
assert!(files.len() > 0);
let log = "cargo::sources::git=trace";
for file in files {
if !file.exists() {
continue
}
println!("deleting {}", file.display());
cargopaths::remove_file(&file).unwrap();
assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
execs().with_status(0));
if !file.exists() {
continue
}
println!("truncating {}", file.display());
make_writable(&file);
fs::OpenOptions::new()
.write(true)
.open(&file)
.unwrap()
.set_len(2)
.unwrap();
assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
execs().with_status(0));
}
}
#[test]
fn deleting_checkout_files() {
let project = project("foo");
let git_project = git::new("bar", |project| {
project
.file("Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = []
"#)
.file("src/lib.rs", "")
}).unwrap();
let project = project
.file("Cargo.toml", &format!(r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
[dependencies]
bar = {{ git = '{}' }}
"#, git_project.url()))
.file("src/lib.rs", "")
.build();
assert_that(project.cargo("build"), execs().with_status(0));
let dir = paths::home()
.join(".cargo/git/checkouts")
// get the first entry in the checkouts dir for the package's location
.read_dir()
.unwrap()
.next()
.unwrap()
.unwrap()
.path()
// get the first child of that checkout dir for our checkout
.read_dir()
.unwrap()
.next()
.unwrap()
.unwrap()
.path()
// and throw on .git to corrupt things
.join(".git");
let mut files = Vec::new();
find_files(&dir, &mut files);
assert!(files.len() > 0);
let log = "cargo::sources::git=trace";
for file in files {
if !file.exists() {
continue
}
println!("deleting {}", file.display());
cargopaths::remove_file(&file).unwrap();
assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
execs().with_status(0));
if !file.exists() {
continue
}
println!("truncating {}", file.display());
make_writable(&file);
fs::OpenOptions::new()
.write(true)
.open(&file)
.unwrap()
.set_len(2)
.unwrap();
assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
execs().with_status(0));
}
}
fn make_writable(path: &Path) {
let mut p = path.metadata().unwrap().permissions();
p.set_readonly(false);
fs::set_permissions(path, p).unwrap();
}
fn find_files(path: &Path, dst: &mut Vec<PathBuf>) {
for e in path.read_dir().unwrap() {
let e = e.unwrap();
let path = e.path();
if e.file_type().unwrap().is_dir() {
find_files(&path, dst);
} else {
dst.push(path);
}
}
}

View file

@@ -855,7 +855,8 @@ git = git_project.url(), dir = p.url())));
[UPDATING] git repository [..]
[ERROR] Unable to update [..]
To learn more, run the command again with --verbose.
Caused by:
revspec '0.1.2' not found; [..]
"));
// Specifying a precise rev to the old rev shouldn't actually update

View file

@@ -38,6 +38,7 @@ mod check;
mod clean;
mod concurrent;
mod config;
mod corrupt_git;
mod cross_compile;
mod cross_publish;
mod death;