diff --git a/Cargo.lock b/Cargo.lock index b70a7fff6..7183efecf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3,25 +3,27 @@ name = "cargo" version = "0.1.0" dependencies = [ "advapi32-sys 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "curl 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.6.39 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "flate2 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "git2 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", - "git2-curl 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "glob 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "git2-curl 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "glob 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "hamcrest 0.1.0 (git+https://github.com/carllerche/hamcrest-rust.git)", - "log 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", "registry 0.1.0", - "rustc-serialize 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", - "tar 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", - "term 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "url 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tar 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "term 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "threadpool 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", + "url 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -29,7 +31,7 @@ name = "advapi32-sys" version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "winapi 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -39,24 +41,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "curl" -version = "0.1.17" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.4.3 
(registry+https://github.com/rust-lang/crates.io-index)", - "url 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "url 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "curl-sys" -version = "0.1.10" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -65,7 +67,7 @@ version = "0.6.39" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "regex 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -73,65 +75,67 @@ name = "env_logger" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "log 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "flate2" -version = "0.1.8" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "miniz-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "gcc" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "gcc" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "gcc" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "git2" -version = "0.1.21" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libgit2-sys 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", - "url 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libgit2-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "url 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "git2-curl" -version = "0.1.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "git2 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", - "url 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", + "url 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "glob" -version = "0.1.9" +version = "0.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "hamcrest" version = "0.1.0" -source = "git+https://github.com/carllerche/hamcrest-rust.git#af4418fea7d8cdba6469d43ff45a50dd5f7ad357" +source = "git+https://github.com/carllerche/hamcrest-rust.git#c90897390602ef1ad29de8988cc8b0ed23b540cd" [[package]] name = "kernel32-sys" -version = "0.0.9" +version = "0.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "winapi 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -141,13 +145,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "libgit2-sys" -version = "0.1.14" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libssh2-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libssh2-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -160,7 +165,7 @@ dependencies = [ [[package]] name = "libssh2-sys" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -179,7 +184,7 @@ dependencies = [ [[package]] name = "log" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -189,10 +194,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "miniz-sys" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "gcc 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -206,11 +211,6 @@ dependencies = [ "pkg-config 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "pkg-config" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "pkg-config" version = "0.2.2" @@ -235,13 +235,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" name = "registry" version = "0.1.0" dependencies = [ - "curl 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rustc-serialize" -version = "0.2.15" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -251,48 +251,53 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "tar" -version = "0.1.11" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "term" -version = "0.1.12" +version = "0.1.13" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "kernel32-sys 0.0.9 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "threadpool" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "time" -version = "0.1.17" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "gcc 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "toml" -version = "0.1.17" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rustc-serialize 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "url" -version = "0.2.21" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "winapi" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/Cargo.toml b/Cargo.toml index 2517dc562..7117fc948 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,20 +12,22 @@ path = "src/cargo/lib.rs" [dependencies] toml = "0.1" semver = "0.1" -curl = "0.1" -tar = "0.1" -flate2 = "0.1" -git2 = "0.1" -git2-curl = "0.1" -glob = "0.1" +curl = "0.2" +tar = "0.2" +flate2 = "0.2" +git2 = "0.2" +git2-curl = "0.2" +glob = "0.2" time = "0.1" log = "0.2" env_logger = "0.2" docopt = "0.6" url = "0.2" -rustc-serialize = "0.2" +rustc-serialize = "0.3" term = "0.1" regex = "0.1" +threadpool = "0.1" +libc = "0.1" [target.i686-pc-windows-gnu.dependencies] winapi = "0.1" diff --git a/Makefile.in b/Makefile.in index 99ac735e4..1cc62e48e 100644 --- a/Makefile.in +++ b/Makefile.in @@ -70,7 +70,7 @@ $(foreach target,$(CFG_TARGET),$(eval $(call DIST_TARGET,$(target)))) ifdef CFG_LOCAL_CARGO CARGO := $(CFG_LOCAL_CARGO) else -CARGO := $(TARGET_ROOT)/snapshot/bin/cargo$(X) +CARGO := $(TARGET_ROOT)/snapshot/cargo/bin/cargo$(X) endif all: $(foreach target,$(CFG_TARGET),cargo-$(target)) @@ -82,11 +82,12 @@ cargo-$(1): $$(CARGO) $$(CARGO) build --target $(1) $$(OPT_FLAG) $$(VERBOSE_FLAG) $$(ARGS) test-unit-$(1): $$(CARGO) + @mkdir -p target/$(1)/cit $$(CARGO) test --target $(1) $$(VERBOSE_FLAG) $$(only) endef $(foreach target,$(CFG_TARGET),$(eval $(call CARGO_TARGET,$(target)))) -$(TARGET_ROOT)/snapshot/bin/cargo$(X): src/snapshots.txt +$(TARGET_ROOT)/snapshot/cargo/bin/cargo$(X): src/snapshots.txt $(CFG_PYTHON) src/etc/dl-snapshot.py $(CFG_BUILD) touch $@ diff --git a/src/bin/bench.rs b/src/bin/bench.rs index 8db4aa930..00edc0051 100644 --- a/src/bin/bench.rs +++ 
b/src/bin/bench.rs @@ -1,5 +1,3 @@ -use std::old_io::process::ExitStatus; - use cargo::ops; use cargo::util::{CliResult, CliError, Human, Config}; use cargo::util::important_paths::{find_root_manifest_for_cwd}; @@ -76,9 +74,9 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { match err { None => Ok(None), Some(err) => { - Err(match err.exit { - Some(ExitStatus(i)) => CliError::new("", i as i32), - _ => CliError::from_boxed(box Human(err), 101) + Err(match err.exit.as_ref().and_then(|c| c.code()) { + Some(i) => CliError::new("", i), + None => CliError::from_error(Human(err), 101) }) } } diff --git a/src/bin/cargo.rs b/src/bin/cargo.rs index 630c6da03..d966d0034 100644 --- a/src/bin/cargo.rs +++ b/src/bin/cargo.rs @@ -1,16 +1,19 @@ -#![feature(collections, core, old_io, old_path, env)] +#![feature(collections, core, io, path, process, fs, env, std_misc, os, old_io)] extern crate "git2-curl" as git2_curl; extern crate "rustc-serialize" as rustc_serialize; extern crate cargo; extern crate env_logger; +extern crate toml; #[macro_use] extern crate log; use std::collections::BTreeSet; use std::env; -use std::old_io::fs::{self, PathExtensions}; -use std::old_io::process::{Command,InheritFd,ExitStatus,ExitSignal}; -use std::old_io; +use std::fs; +use std::io::prelude::*; +use std::io; +use std::path::{PathBuf, Path}; +use std::process::Command; use cargo::{execute_main_without_stdin, handle_error, shell}; use cargo::core::MultiShell; @@ -163,28 +166,24 @@ fn execute_subcommand(cmd: &str, args: &[String], shell: &mut MultiShell) { return handle_error(CliError::new(&msg, 127), shell) } }; - let status = Command::new(command) - .args(&args[1..]) - .stdin(InheritFd(0)) - .stdout(InheritFd(1)) - .stderr(InheritFd(2)) - .status(); - - match status { - Ok(ExitStatus(0)) => (), - Ok(ExitStatus(i)) => { - handle_error(CliError::new("", i as i32), shell) + match Command::new(&command).args(&args[1..]).status() { + Ok(ref status) if status.success() => {} + Ok(ref status) => { + match status.code() { + Some(code) => handle_error(CliError::new("", code), shell), + None => { + let msg = format!("subcommand failed with: {}", status); + handle_error(CliError::new(&msg, 101), shell) + } + } } - Ok(ExitSignal(i)) => { - let msg = format!("subcommand failed with signal: {}", i); - handle_error(CliError::new(&msg, i as i32), shell) + Err(ref e) if e.kind() == io::ErrorKind::FileNotFound => { + handle_error(CliError::new("No such subcommand", 127), shell) + } + Err(err) => { + let msg = format!("Subcommand failed to run: {}", err); + handle_error(CliError::new(&msg, 127), shell) } - Err(old_io::IoError{kind, ..}) if kind == old_io::FileNotFound => - handle_error(CliError::new("No such subcommand", 127), shell), - Err(err) => handle_error( - CliError::new( - &format!("Subcommand failed to run: {}", err), 127), - shell) } } @@ -194,18 +193,20 @@ fn list_commands() -> BTreeSet { let command_prefix = "cargo-"; let mut commands = BTreeSet::new(); for dir in list_command_directory().iter() { - let entries = match fs::readdir(dir) { + let entries = match fs::read_dir(dir) { Ok(entries) => entries, _ => continue }; - for entry in entries.iter() { - let filename = match entry.filename_str() { + for entry in entries { + let entry = match entry { Ok(e) => e, Err(..) 
=> continue }; + let entry = entry.path(); + let filename = match entry.file_name().and_then(|s| s.to_str()) { Some(filename) => filename, _ => continue }; if filename.starts_with(command_prefix) && - filename.ends_with(env::consts::EXE_SUFFIX) && - is_executable(entry) { + filename.ends_with(env::consts::EXE_SUFFIX) && + is_executable(&entry) { let command = &filename[ command_prefix.len().. filename.len() - env::consts::EXE_SUFFIX.len()]; @@ -221,16 +222,20 @@ fn list_commands() -> BTreeSet<String> { commands } +#[cfg(unix)] fn is_executable(path: &Path) -> bool { - match fs::stat(path) { - Ok(old_io::FileStat{ kind: old_io::FileType::RegularFile, perm, ..}) => - perm.contains(old_io::OTHER_EXECUTE), - _ => false - } + use std::os::unix::prelude::*; + path.metadata().map(|m| { + m.permissions().mode() & 0o001 == 0o001 + }).unwrap_or(false) +} +#[cfg(windows)] +fn is_executable(path: &Path) -> bool { + path.is_file() } /// Get `Command` to run given command. -fn find_command(cmd: &str) -> Option<Path> { +fn find_command(cmd: &str) -> Option<PathBuf> { let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX); let dirs = list_command_directory(); let mut command_paths = dirs.iter().map(|dir| dir.join(&command_exe)); @@ -238,7 +243,7 @@ fn find_command(cmd: &str) -> Option<Path> { } /// List candidate locations where subcommands might be installed. -fn list_command_directory() -> Vec<Path> { +fn list_command_directory() -> Vec<PathBuf> { let mut dirs = vec![]; if let Ok(mut path) = env::current_exe() { path.pop(); diff --git a/src/bin/locate_project.rs b/src/bin/locate_project.rs index af2f45a60..4c870ba4d 100644 --- a/src/bin/locate_project.rs +++ b/src/bin/locate_project.rs @@ -24,7 +24,7 @@ pub fn execute(flags: LocateProjectFlags, _: &Config) -> CliResult<Option<ProjectLocation>> { let root = try!(find_root_manifest_for_cwd(flags.flag_manifest_path)); - let string = try!(root.as_str() + let string = try!(root.to_str() .chain_error(|| human("Your project path contains \ characters not representable in \ Unicode")) diff --git a/src/bin/read_manifest.rs b/src/bin/read_manifest.rs index 6a89cdb59..d72f2a8cb 100644 --- a/src/bin/read_manifest.rs +++ b/src/bin/read_manifest.rs @@ -1,3 +1,5 @@ +use std::path::Path; + use cargo::core::{Package, Source}; use cargo::util::{CliResult, CliError, Config}; use cargo::sources::{PathSource}; diff --git a/src/bin/run.rs b/src/bin/run.rs index 57fadbb07..2b8c7b973 100644 --- a/src/bin/run.rs +++ b/src/bin/run.rs @@ -1,5 +1,3 @@ -use std::old_io::process::ExitStatus; - use cargo::ops; use cargo::core::manifest::TargetKind; use cargo::util::{CliResult, CliError, human, Config}; @@ -86,9 +84,9 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> { match err { None => Ok(None), Some(err) => { - Err(match err.exit { - Some(ExitStatus(i)) => CliError::from_boxed(box err, i as i32), - _ => CliError::from_boxed(box err, 101), + Err(match err.exit.as_ref().and_then(|e| e.code()) { + Some(i) => CliError::from_error(err, i), + None => CliError::from_error(err, 101), }) } } diff --git a/src/bin/test.rs b/src/bin/test.rs index ef326719e..7eedac427 100644 --- a/src/bin/test.rs +++ b/src/bin/test.rs @@ -1,5 +1,3 @@ -use std::old_io::process::ExitStatus; - use cargo::ops; use cargo::util::{CliResult, CliError, Human, Config}; use cargo::util::important_paths::{find_root_manifest_for_cwd}; @@ -78,9 +76,9 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> { match err { None => Ok(None), Some(err) => { - Err(match err.exit { - Some(ExitStatus(i)) => CliError::new("", i as i32), - _ => 
CliError::from_boxed(box Human(err), 101) + Err(match err.exit.as_ref().and_then(|e| e.code()) { + Some(i) => CliError::new("", i), + None => CliError::from_error(Human(err), 101) }) } } diff --git a/src/bin/verify_project.rs b/src/bin/verify_project.rs index 816c5e9bb..33c271a9e 100644 --- a/src/bin/verify_project.rs +++ b/src/bin/verify_project.rs @@ -1,9 +1,9 @@ -extern crate toml; - use std::collections::HashMap; use std::env; -use std::old_io::File; +use std::fs::File; +use std::io::prelude::*; +use toml; use cargo::util::{CliResult, Config}; pub type Error = HashMap; @@ -28,9 +28,10 @@ Options: pub fn execute(args: Flags, config: &Config) -> CliResult> { config.shell().set_verbose(args.flag_verbose); - let file = Path::new(args.flag_manifest_path); - let contents = match File::open(&file).read_to_string() { - Ok(s) => s, + let mut contents = String::new(); + let file = File::open(&args.flag_manifest_path); + match file.and_then(|mut f| f.read_to_string(&mut contents)) { + Ok(()) => {}, Err(e) => return fail("invalid", format!("error reading file: {}", e).as_slice()) }; diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index f942a2471..eadaa12d9 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -1,4 +1,5 @@ use std::hash; +use std::path::{PathBuf, Path}; use semver::Version; use rustc_serialize::{Encoder,Encodable}; @@ -13,8 +14,8 @@ use util::{CargoResult, human}; pub struct Manifest { summary: Summary, targets: Vec, - target_dir: Path, - doc_dir: Path, + target_dir: PathBuf, + doc_dir: PathBuf, links: Option, warnings: Vec, exclude: Vec, @@ -308,7 +309,7 @@ impl hash::Hash for Profile { pub struct Target { kind: TargetKind, name: String, - src_path: Path, + src_path: PathBuf, profile: Profile, metadata: Option, } @@ -344,7 +345,7 @@ impl Encodable for Target { impl Manifest { pub fn new(summary: Summary, targets: Vec, - target_dir: Path, doc_dir: Path, + target_dir: PathBuf, doc_dir: PathBuf, exclude: Vec, include: Vec, links: Option, @@ -386,7 +387,7 @@ impl Manifest { self.summary = summary; } - pub fn set_target_dir(&mut self, target_dir: Path) { + pub fn set_target_dir(&mut self, target_dir: PathBuf) { self.target_dir = target_dir; } } @@ -406,7 +407,7 @@ impl Target { Target { kind: TargetKind::Lib(crate_targets), name: name.to_string(), - src_path: src_path.clone(), + src_path: src_path.to_path_buf(), profile: profile.clone(), metadata: Some(metadata) } @@ -417,7 +418,7 @@ impl Target { Target { kind: TargetKind::Bin, name: name.to_string(), - src_path: src_path.clone(), + src_path: src_path.to_path_buf(), profile: profile.clone(), metadata: metadata, } @@ -429,7 +430,7 @@ impl Target { Target { kind: TargetKind::Bin, name: name.to_string(), - src_path: src_path.clone(), + src_path: src_path.to_path_buf(), profile: profile.clone(), metadata: metadata, } @@ -439,7 +440,7 @@ impl Target { Target { kind: TargetKind::Example, name: name.to_string(), - src_path: src_path.clone(), + src_path: src_path.to_path_buf(), profile: profile.clone(), metadata: None, } @@ -450,7 +451,7 @@ impl Target { Target { kind: TargetKind::Bin, name: name.to_string(), - src_path: src_path.clone(), + src_path: src_path.to_path_buf(), profile: profile.clone(), metadata: Some(metadata), } @@ -461,7 +462,7 @@ impl Target { Target { kind: TargetKind::Bin, name: name.to_string(), - src_path: src_path.clone(), + src_path: src_path.to_path_buf(), profile: profile.clone(), metadata: Some(metadata), } diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs 
index 58e7415b6..72613af2f 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -1,6 +1,7 @@ use std::fmt::{self, Formatter}; use std::hash; use std::slice; +use std::path::{Path, PathBuf}; use semver::Version; use core::{ @@ -25,7 +26,7 @@ pub struct Package { // The package's manifest manifest: Manifest, // The root of the package - manifest_path: Path, + manifest_path: PathBuf, // Where this package came from source_id: SourceId, } @@ -63,7 +64,7 @@ impl Package { source_id: &SourceId) -> Package { Package { manifest: manifest, - manifest_path: manifest_path.clone(), + manifest_path: manifest_path.to_path_buf(), source_id: source_id.clone(), } } @@ -73,13 +74,13 @@ impl Package { pub fn manifest_path(&self) -> &Path { &self.manifest_path } pub fn name(&self) -> &str { self.package_id().name() } pub fn package_id(&self) -> &PackageId { self.manifest.package_id() } - pub fn root(&self) -> Path { self.manifest_path.dir_path() } + pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() } pub fn summary(&self) -> &Summary { self.manifest.summary() } pub fn target_dir(&self) -> &Path { self.manifest.target_dir() } pub fn targets(&self) -> &[Target] { self.manifest().targets() } pub fn version(&self) -> &Version { self.package_id().version() } - pub fn absolute_target_dir(&self) -> Path { + pub fn absolute_target_dir(&self) -> PathBuf { self.root().join(self.target_dir()) } diff --git a/src/cargo/core/source.rs b/src/cargo/core/source.rs index a9cf3bd08..9eb6bbd93 100644 --- a/src/cargo/core/source.rs +++ b/src/cargo/core/source.rs @@ -1,8 +1,9 @@ -use std::collections::hash_map::{HashMap, Values, IterMut}; use std::cmp::Ordering; +use std::collections::hash_map::{HashMap, Values, IterMut}; use std::fmt::{self, Formatter}; use std::hash; use std::mem; +use std::path::{Path, PathBuf}; use std::sync::Arc; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; @@ -128,7 +129,7 @@ impl SourceId { SourceId::new(Kind::Registry, url) .with_precise(Some("locked".to_string())) } - "path" => SourceId::for_path(&Path::new(&url[5..])).unwrap(), + "path" => SourceId::for_path(Path::new(&url[5..])).unwrap(), _ => panic!("Unsupported serialized SourceId") } } @@ -197,7 +198,7 @@ impl SourceId { match self.inner.kind { Kind::Git(..) 
=> Box::new(GitSource::new(self, config)) as Box, Kind::Path => { - let path = match self.inner.url.to_file_path() { + let path = match self.inner.url.to_file_path::() { Ok(p) => p, Err(()) => panic!("path sources cannot be remote"), }; diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index f45a46398..7defa760c 100644 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -1,25 +1,25 @@ #![deny(unused)] -#![feature(collections, hash, libc, os, std_misc, unicode, env, core)] -#![feature(old_io, old_path, str_words)] +#![feature(collections, hash, os, std_misc, unicode, env, core)] +#![feature(io, path, str_words, process, fs, old_io)] #![cfg_attr(test, deny(warnings))] -extern crate libc; -extern crate "rustc-serialize" as rustc_serialize; -extern crate regex; -extern crate term; -extern crate time; +#[cfg(test)] extern crate hamcrest; #[macro_use] extern crate log; - +extern crate "rustc-serialize" as rustc_serialize; extern crate curl; extern crate docopt; extern crate flate2; extern crate git2; extern crate glob; +extern crate libc; +extern crate regex; extern crate semver; extern crate tar; +extern crate term; +extern crate threadpool; +extern crate time; extern crate toml; extern crate url; -#[cfg(test)] extern crate hamcrest; extern crate registry; diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index e8129d110..bbfab64c7 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -1,5 +1,7 @@ use std::default::Default; -use std::old_io::fs::{self, PathExtensions}; +use std::fs; +use std::io::prelude::*; +use std::path::Path; use core::PackageSet; use core::source::{Source, SourceMap}; @@ -15,7 +17,7 @@ pub struct CleanOptions<'a, 'b: 'a> { /// Cleans the project from build artifacts. pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { - let mut src = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), opts.config)); try!(src.update()); let root = try!(src.root_package()); @@ -55,11 +57,10 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { // And finally, clean everything out! 
for target in pkg.targets().iter() { let layout = Layout::new(&root, opts.target, target.profile().dest()); - try!(rm_rf(&layout.native(&pkg))); try!(rm_rf(&layout.fingerprint(&pkg))); for filename in try!(cx.target_filenames(target)).iter() { - try!(rm_rf(&layout.dest().join(filename))); - try!(rm_rf(&layout.deps().join(filename))); + try!(rm_rf(&layout.dest().join(&filename))); + try!(rm_rf(&layout.deps().join(&filename))); } } @@ -68,11 +69,11 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { fn rm_rf(path: &Path) -> CargoResult<()> { if path.is_dir() { - try!(fs::rmdir_recursive(path).chain_error(|| { + try!(fs::remove_dir_all(path).chain_error(|| { human("could not remove build directory") })); } else if path.exists() { - try!(fs::unlink(path).chain_error(|| { + try!(fs::remove_file(path).chain_error(|| { human("failed to remove build artifact") })); } diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 37a5f0993..2c8fd0b1a 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -26,6 +26,7 @@ use std::collections::HashMap; use std::default::Default; use std::num::ToPrimitive; use std::os; +use std::path::Path; use std::sync::Arc; use core::registry::PackageRegistry; @@ -58,7 +59,7 @@ pub fn compile(manifest_path: &Path, -> CargoResult { debug!("compile; manifest-path={}", manifest_path.display()); - let mut source = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(), options.config)); try!(source.update()); @@ -163,7 +164,7 @@ pub fn compile_pkg(package: &Package, options: &CompileOptions) return Ok(ret); } -fn source_ids_from_config(config: &Config, cur_path: Path) +fn source_ids_from_config(config: &Config, cur_path: &Path) -> CargoResult> { let configs = try!(config.values()); @@ -180,11 +181,11 @@ fn source_ids_from_config(config: &Config, cur_path: Path) // The path listed next to the string is the config file in which the // key was located, so we want to pop off the `.cargo/config` component // to get the directory containing the `.cargo` folder. - p.dir_path().dir_path().join(s) + p.parent().unwrap().parent().unwrap().join(s) }).filter(|p| { // Make sure we don't override the local package, even if it's in the // list of override paths. 
- cur_path != *p + cur_path != &**p }).map(|p| SourceId::for_path(&p)).collect() } diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs index 1dd0df548..fd7304caf 100644 --- a/src/cargo/ops/cargo_doc.rs +++ b/src/cargo/ops/cargo_doc.rs @@ -1,11 +1,12 @@ -use std::old_io::fs::PathExtensions; use std::collections::HashSet; +use std::io::prelude::*; +use std::path::Path; +use std::process::Command; use core::PackageIdSpec; use core::source::Source; use ops; use sources::PathSource; -use std::old_io::process::Command; use util::{CargoResult, human}; pub struct DocOptions<'a, 'b: 'a> { @@ -16,7 +17,7 @@ pub struct DocOptions<'a, 'b: 'a> { pub fn doc(manifest_path: &Path, options: &DocOptions) -> CargoResult<()> { - let mut source = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(), options.compile_opts.config)); try!(source.update()); let package = try!(source.root_package()); @@ -54,7 +55,7 @@ pub fn doc(manifest_path: &Path, } }; - let path = package.absolute_target_dir().join("doc").join(name) + let path = package.absolute_target_dir().join("doc").join(&name) .join("index.html"); if path.exists() { open_docs(&path); @@ -67,19 +68,19 @@ pub fn doc(manifest_path: &Path, #[cfg(not(any(target_os = "windows", target_os = "macos")))] fn open_docs(path: &Path) { // trying xdg-open - match Command::new("xdg-open").arg(path).detached().status() { + match Command::new("xdg-open").arg(path).status() { Ok(_) => return, Err(_) => () }; // trying gnome-open - match Command::new("gnome-open").arg(path).detached().status() { + match Command::new("gnome-open").arg(path).status() { Ok(_) => return, Err(_) => () }; // trying kde-open - match Command::new("kde-open").arg(path).detached().status() { + match Command::new("kde-open").arg(path).status() { Ok(_) => return, Err(_) => () }; @@ -87,7 +88,7 @@ fn open_docs(path: &Path) { #[cfg(target_os = "windows")] fn open_docs(path: &Path) { - match Command::new("start").arg(path).detached().status() { + match Command::new("start").arg(path).status() { Ok(_) => return, Err(_) => () }; @@ -95,7 +96,7 @@ fn open_docs(path: &Path) { #[cfg(target_os = "macos")] fn open_docs(path: &Path) { - match Command::new("open").arg(path).detached().status() { + match Command::new("open").arg(path).status() { Ok(_) => return, Err(_) => () }; diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index 9ba0f0058..d4faeffcd 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -1,3 +1,5 @@ +use std::path::Path; + use core::registry::PackageRegistry; use core::{Source, PackageId}; use ops; @@ -6,7 +8,7 @@ use util::{CargoResult, Config, human, ChainError}; /// Executes `cargo fetch`. 
pub fn fetch(manifest_path: &Path, config: &Config) -> CargoResult<()> { - let mut source = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(), config)); try!(source.update()); let package = try!(source.root_package()); diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index 77d6c3c01..a314003b8 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -1,4 +1,5 @@ use std::collections::HashSet; +use std::path::Path; use core::PackageId; use core::registry::PackageRegistry; @@ -18,7 +19,7 @@ pub struct UpdateOptions<'a, 'b: 'a> { pub fn generate_lockfile(manifest_path: &Path, config: &Config) -> CargoResult<()> { - let mut source = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(), config)); try!(source.update()); let package = try!(source.root_package()); @@ -32,7 +33,7 @@ pub fn generate_lockfile(manifest_path: &Path, config: &Config) pub fn update_lockfile(manifest_path: &Path, opts: &UpdateOptions) -> CargoResult<()> { - let mut source = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(), opts.config)); try!(source.update()); let package = try!(source.root_package()); diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs index b8b38b4a2..5038495bc 100644 --- a/src/cargo/ops/cargo_new.rs +++ b/src/cargo/ops/cargo_new.rs @@ -1,6 +1,8 @@ use std::env; -use std::old_io::fs::PathExtensions; -use std::old_io::{self, fs, File}; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::io; +use std::path::Path; use rustc_serialize::{Decodable, Decoder}; @@ -44,7 +46,10 @@ pub fn new(opts: NewOptions, config: &Config) -> CargoResult<()> { return Err(human(format!("Destination `{}` already exists", path.display()))) } - let name = path.filename_str().unwrap(); + let name = try!(path.file_name().and_then(|s| s.to_str()).chain_error(|| { + human(&format!("cannot create a project with a non-unicode name: {:?}", + path.file_name().unwrap())) + })); for c in name.chars() { if c.is_alphanumeric() { continue } if c == '_' || c == '-' { continue } @@ -61,11 +66,15 @@ fn existing_vcs_repo(path: &Path) -> bool { GitRepo::discover(path).is_ok() || HgRepo::discover(path).is_ok() } +fn file(p: &Path, contents: &[u8]) -> io::Result<()> { + try!(File::create(p)).write_all(contents) +} + fn mk(config: &Config, path: &Path, name: &str, opts: &NewOptions) -> CargoResult<()> { let cfg = try!(global_config(config)); let mut ignore = "target\n".to_string(); - let in_existing_vcs_repo = existing_vcs_repo(&path.dir_path()); + let in_existing_vcs_repo = existing_vcs_repo(path.parent().unwrap()); if !opts.bin { ignore.push_str("Cargo.lock\n"); } @@ -80,14 +89,14 @@ fn mk(config: &Config, path: &Path, name: &str, match vcs { VersionControl::Git => { try!(GitRepo::init(path)); - try!(File::create(&path.join(".gitignore")).write_all(ignore.as_bytes())); + try!(file(&path.join(".gitignore"), ignore.as_bytes())); }, VersionControl::Hg => { try!(HgRepo::init(path)); - try!(File::create(&path.join(".hgignore")).write_all(ignore.as_bytes())); + try!(file(&path.join(".hgignore"), ignore.as_bytes())); }, VersionControl::NoVcs => { - try!(fs::mkdir(path, old_io::USER_RWX)); + try!(fs::create_dir(path)); }, }; @@ -102,24 +111,24 @@ fn mk(config: &Config, path: &Path, name: &str, (None, 
None, name, None) => name, }; - try!(File::create(&path.join("Cargo.toml")).write_str(&format!( + try!(file(&path.join("Cargo.toml"), format!( r#"[package] name = "{}" version = "0.0.1" authors = ["{}"] -"#, name, author))); +"#, name, author).as_bytes())); - try!(fs::mkdir(&path.join("src"), old_io::USER_RWX)); + try!(fs::create_dir(&path.join("src"))); if opts.bin { - try!(File::create(&path.join("src/main.rs")).write_str("\ + try!(file(&path.join("src/main.rs"), b"\ fn main() { println!(\"Hello, world!\"); } ")); } else { - try!(File::create(&path.join("src/lib.rs")).write_str("\ + try!(file(&path.join("src/lib.rs"), b"\ #[test] fn it_works() { } diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index d2840c7fe..426181a1a 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -1,23 +1,23 @@ -use std::old_io::{fs, File, USER_DIR}; -use std::old_io::fs::PathExtensions; -use std::old_path; +use std::io::prelude::*; +use std::fs::{self, File}; +use std::path::{self, Path, PathBuf}; use tar::Archive; -use flate2::{GzBuilder, BestCompression}; -use flate2::reader::GzDecoder; +use flate2::{GzBuilder, Compression}; +use flate2::read::GzDecoder; use core::source::{Source, SourceId}; use core::Package; use sources::PathSource; -use util::{CargoResult, human, internal, ChainError, Config}; +use util::{self, CargoResult, human, internal, ChainError, Config}; use ops; -struct Bomb { path: Option<Path> } +struct Bomb { path: Option<PathBuf> } impl Drop for Bomb { fn drop(&mut self) { match self.path.as_ref() { - Some(path) => { let _ = fs::unlink(path); } + Some(path) => { let _ = fs::remove_file(path); } None => {} } } @@ -27,8 +27,8 @@ pub fn package(manifest_path: &Path, config: &Config, verify: bool, list: bool, - metadata: bool) -> CargoResult<Option<Path>> { - let mut src = try!(PathSource::for_path(&manifest_path.dir_path(), + metadata: bool) -> CargoResult<Option<PathBuf>> { + let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), config)); try!(src.update()); let pkg = try!(src.root_package()); @@ -38,9 +38,9 @@ pub fn package(manifest_path: &Path, } if list { - let root = pkg.manifest_path().dir_path(); + let root = pkg.root(); let mut list: Vec<_> = try!(src.list_files(&pkg)).iter().map(|file| { - file.path_relative_from(&root).unwrap() + file.relative_from(&root).unwrap().to_path_buf() }).collect(); list.sort(); for file in list.iter() { @@ -50,7 +50,7 @@ pub fn package(manifest_path: &Path, } let filename = format!("package/{}-{}.crate", pkg.name(), pkg.version()); - let dst = pkg.absolute_target_dir().join(filename); + let dst = pkg.absolute_target_dir().join(&filename); if dst.exists() { return Ok(Some(dst)) } let mut bomb = Bomb { path: Some(dst.clone()) }; @@ -110,21 +110,22 @@ fn tar(pkg: &Package, src: &PathSource, config: &Config, dst.display()))) } - try!(fs::mkdir_recursive(&dst.dir_path(), USER_DIR)); + try!(fs::create_dir_all(dst.parent().unwrap())); let tmpfile = try!(File::create(dst)); // Prepare the encoder and its header - let encoder = GzBuilder::new().filename(dst.filename().unwrap()) - .writer(tmpfile, BestCompression); + let filename = Path::new(dst.file_name().unwrap()); + let encoder = GzBuilder::new().filename(try!(util::path2bytes(filename))) + .write(tmpfile, Compression::Best); // Put all package files into a compressed archive let ar = Archive::new(encoder); - let root = pkg.manifest_path().dir_path(); + let root = pkg.root(); for file in try!(src.list_files(pkg)).iter() { - if file == dst { continue } - let relative = 
file.path_relative_from(&root).unwrap(); - let relative = try!(relative.as_str().chain_error(|| { + if &**file == dst { continue } + let relative = file.relative_from(&root).unwrap(); + let relative = try!(relative.to_str().chain_error(|| { human(format!("non-utf8 path in source directory: {}", relative.display())) })); @@ -133,7 +134,7 @@ fn tar(pkg: &Package, src: &PathSource, config: &Config, shell.status("Archiving", &relative) })); let path = format!("{}-{}{}{}", pkg.name(), pkg.version(), - old_path::SEP, relative); + path::MAIN_SEPARATOR, relative); try!(ar.append(&path, &mut file).chain_error(|| { internal(format!("could not archive source file `{}`", relative)) })); @@ -147,13 +148,13 @@ fn run_verify(config: &Config, pkg: &Package, tar: &Path) try!(config.shell().status("Verifying", pkg)); let f = try!(GzDecoder::new(try!(File::open(tar)))); - let dst = pkg.root().join(format!("target/package/{}-{}", - pkg.name(), pkg.version())); + let dst = pkg.root().join(&format!("target/package/{}-{}", + pkg.name(), pkg.version())); if dst.exists() { - try!(fs::rmdir_recursive(&dst)); + try!(fs::remove_dir_all(&dst)); } let mut archive = Archive::new(f); - try!(archive.unpack(&dst.dir_path())); + try!(archive.unpack(dst.parent().unwrap())); let manifest_path = dst.join("Cargo.toml"); // When packages are uploaded to the registry, all path dependencies are diff --git a/src/cargo/ops/cargo_pkgid.rs b/src/cargo/ops/cargo_pkgid.rs index 4413fef2d..d900510b3 100644 --- a/src/cargo/ops/cargo_pkgid.rs +++ b/src/cargo/ops/cargo_pkgid.rs @@ -1,3 +1,5 @@ +use std::path::Path; + use ops; use core::{Source, PackageIdSpec}; use sources::{PathSource}; @@ -6,7 +8,7 @@ use util::{CargoResult, human, Config}; pub fn pkgid(manifest_path: &Path, spec: Option<&str>, config: &Config) -> CargoResult<PackageIdSpec> { - let mut source = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut source = try!(PathSource::for_path(&manifest_path.parent().unwrap(), config)); try!(source.update()); let package = try!(source.root_package()); diff --git a/src/cargo/ops/cargo_read_manifest.rs b/src/cargo/ops/cargo_read_manifest.rs index f381f7a1a..79de52a5e 100644 --- a/src/cargo/ops/cargo_read_manifest.rs +++ b/src/cargo/ops/cargo_read_manifest.rs @@ -1,7 +1,9 @@ use std::collections::HashSet; use std::error::FromError; -use std::old_io::fs::PathExtensions; -use std::old_io::{self, File, fs}; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::io; +use std::path::{Path, PathBuf}; use core::{Package,Manifest,SourceId}; use util::{self, CargoResult, human, Config, ChainError}; @@ -10,7 +12,7 @@ use util::toml::{Layout, project_layout}; pub fn read_manifest(contents: &[u8], layout: Layout, source_id: &SourceId, config: &Config) - -> CargoResult<(Manifest, Vec<Path>)> { + -> CargoResult<(Manifest, Vec<PathBuf>)> { let root = layout.root.clone(); util::toml::to_manifest(contents, source_id, layout, config).chain_error(|| { human(format!("failed to parse manifest at `{}`", @@ -19,12 +21,13 @@ pub fn read_manifest(contents: &[u8], layout: Layout, source_id: &SourceId, } pub fn read_package(path: &Path, source_id: &SourceId, config: &Config) - -> CargoResult<(Package, Vec<Path>)> { + -> CargoResult<(Package, Vec<PathBuf>)> { trace!("read_package; path={}; source-id={}", path.display(), source_id); let mut file = try!(File::open(path)); - let data = try!(file.read_to_end()); + let mut data = Vec::new(); + try!(file.read_to_end(&mut data)); - let layout = project_layout(&path.dir_path()); + let layout = project_layout(path.parent().unwrap()); let (manifest, 
nested) = try!(read_manifest(&data, layout, source_id, config)); @@ -34,21 +37,24 @@ pub fn read_package(path: &Path, source_id: &SourceId, config: &Config) pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config) -> CargoResult<Vec<Package>> { let mut all_packages = HashSet::new(); - let mut visited = HashSet::<Path>::new(); + let mut visited = HashSet::<PathBuf>::new(); trace!("looking for root package: {}, source_id={}", path.display(), source_id); - try!(walk(path, |dir| { + try!(walk(path, &mut |dir| { trace!("looking for child package: {}", dir.display()); // Don't recurse into git databases - if dir.filename_str() == Some(".git") { return Ok(false); } + if dir.file_name().and_then(|s| s.to_str()) == Some(".git") { + return Ok(false); + } // Don't automatically discover packages across git submodules if dir != path && dir.join(".git").exists() { return Ok(false); } // Don't ever look at target directories - if dir.filename_str() == Some("target") && has_manifest(&dir.dir_path()) { + if dir.file_name().and_then(|s| s.to_str()) == Some("target") && + has_manifest(dir.parent().unwrap()) { return Ok(false) } @@ -66,34 +72,29 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config) } } -fn walk<F>(path: &Path, mut callback: F) -> CargoResult<()> +fn walk<F>(path: &Path, callback: &mut F) -> CargoResult<()> where F: FnMut(&Path) -> CargoResult<bool> { - walk_inner(path, &mut callback) -} + if !path.is_dir() { return Ok(()) } -fn walk_inner<F>(path: &Path, callback: &mut F) -> CargoResult<()> - where F: FnMut(&Path) -> CargoResult<bool> -{ - if path.is_dir() { - let continues = try!(callback(path)); - if !continues { - trace!("not processing {}", path.display()); - return Ok(()); - } - - // Ignore any permission denied errors because temporary directories - // can often have some weird permissions on them. - let dirs = match fs::readdir(path) { - Ok(dirs) => dirs, - Err(ref e) if e.kind == old_io::PermissionDenied => return Ok(()), - Err(e) => return Err(FromError::from_error(e)), - }; - for dir in dirs.iter() { - try!(walk_inner(dir, callback)); - } + if !try!(callback(path)) { + trace!("not processing {}", path.display()); + return Ok(()); } + // Ignore any permission denied errors because temporary directories + // can often have some weird permissions on them. + let dirs = match fs::read_dir(path) { + Ok(dirs) => dirs, + Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => { + return Ok(()) + } + Err(e) => return Err(FromError::from_error(e)), }; + for dir in dirs { + let dir = try!(dir).path(); + try!(walk(&dir, callback)); + } Ok(()) } @@ -105,8 +106,8 @@ fn read_nested_packages(path: &Path, all_packages: &mut HashSet<Package>, source_id: &SourceId, config: &Config, - visited: &mut HashSet<Path>) -> CargoResult<()> { - if !visited.insert(path.clone()) { return Ok(()) } + visited: &mut HashSet<PathBuf>) -> CargoResult<()> { + if !visited.insert(path.to_path_buf()) { return Ok(()) } let manifest = try!(find_project_manifest_exact(path, "Cargo.toml")); @@ -115,9 +116,16 @@ fn read_nested_packages(path: &Path, // Registry sources are not allowed to have `path=` dependencies because // they're all translated to actual registry dependencies. + // + // We normalize the path here ensure that we don't infinitely walk around + // looking for crates. By normalizing we ensure that we visit this crate at + // most once. + // + // TODO: filesystem/symlink implications? 
if !source_id.is_registry() { for p in nested.iter() { - try!(read_nested_packages(&path.join(p), all_packages, source_id, + let path = util::normalize_path(&path.join(p)); + try!(read_nested_packages(&path, all_packages, source_id, config, visited)); } } diff --git a/src/cargo/ops/cargo_run.rs b/src/cargo/ops/cargo_run.rs index 8c591c196..6ad88a8d2 100644 --- a/src/cargo/ops/cargo_run.rs +++ b/src/cargo/ops/cargo_run.rs @@ -1,3 +1,4 @@ +use std::path::Path; use ops::{self, ExecEngine}; use util::{CargoResult, human, process, ProcessError, ChainError}; @@ -11,7 +12,8 @@ pub fn run(manifest_path: &Path, options: &ops::CompileOptions, args: &[String]) -> CargoResult<Option<ProcessError>> { let config = options.config; - let mut src = try!(PathSource::for_path(&manifest_path.dir_path(), config)); + let mut src = try!(PathSource::for_path(&manifest_path.parent().unwrap(), + config)); try!(src.update()); let root = try!(src.root_package()); let env = options.env; @@ -45,7 +47,7 @@ pub fn run(manifest_path: &Path, } let compile = try!(ops::compile(manifest_path, options)); - let dst = manifest_path.dir_path().join("target"); + let dst = manifest_path.parent().unwrap().join("target"); let dst = match options.target { Some(target) => dst.join(target), None => dst, }; @@ -56,14 +58,13 @@ pub fn run(manifest_path: &Path, (None, true) => dst.join("examples").join(bin.name()), (None, false) => dst.join(bin.name()), }; - let exe = match exe.path_relative_from(config.cwd()) { + let exe = match exe.relative_from(config.cwd()) { Some(path) => path, - None => exe, + None => &*exe, }; - let process = try!(try!(compile.target_process(exe, &root)) - .into_process_builder()) - .args(args) - .cwd(config.cwd().clone()); + let mut process = try!(compile.target_process(exe, &root)) + .into_process_builder(); + process.args(args).cwd(config.cwd()); try!(config.shell().status("Running", process.to_string())); Ok(process.exec().err()) diff --git a/src/cargo/ops/cargo_rustc/compilation.rs b/src/cargo/ops/cargo_rustc/compilation.rs index 3d362d449..ff4b561fe 100644 --- a/src/cargo/ops/cargo_rustc/compilation.rs +++ b/src/cargo/ops/cargo_rustc/compilation.rs @@ -1,7 +1,7 @@ use std::collections::{HashMap, HashSet}; use std::dynamic_lib::DynamicLibrary; -use std::ffi::CString; -use std::old_path::BytesContainer; +use std::ffi::AsOsStr; +use std::path::PathBuf; use semver::Version; use core::{PackageId, Package}; @@ -15,30 +15,30 @@ pub struct Compilation { /// /// This is currently used for passing --extern flags to rustdoc tests later /// on. - pub libraries: HashMap>, + pub libraries: HashMap>, /// An array of all tests created during this compilation. - pub tests: Vec<(String, Path)>, + pub tests: Vec<(String, PathBuf)>, /// An array of all binaries created. - pub binaries: Vec<Path>, + pub binaries: Vec<PathBuf>, /// All directires for the output of native build commands. /// /// This is currently used to drive some entries which are added to the /// LD_LIBRARY_PATH as appropriate. // TODO: deprecated, remove - pub native_dirs: HashMap, + pub native_dirs: HashMap, /// Root output directory (for the local package's artifacts) - pub root_output: Path, + pub root_output: PathBuf, /// Output directory for rust dependencies - pub deps_output: Path, + pub deps_output: PathBuf, /// Extra environment variables that were passed to compilations and should /// be passed to future invocations of programs. 
- pub extra_env: HashMap<String, Option<String>>, + pub extra_env: HashMap<String, String>, /// Top-level package that was compiled pub package: Package, @@ -52,8 +52,8 @@ impl Compilation { Compilation { libraries: HashMap::new(), native_dirs: HashMap::new(), // TODO: deprecated, remove - root_output: Path::new("/"), - deps_output: Path::new("/"), + root_output: PathBuf::new("/"), + deps_output: PathBuf::new("/"), tests: Vec::new(), binaries: Vec::new(), extra_env: HashMap::new(), @@ -73,17 +73,15 @@ impl Compilation { } /// See `process`. - pub fn target_process<T: BytesContainer>(&self, cmd: T, pkg: &Package) - -> CargoResult<CommandPrototype> { - let cmd = try!(CString::new(cmd.container_as_bytes())); - self.process(CommandType::Target(cmd), pkg) + pub fn target_process<T: AsOsStr + ?Sized>(&self, cmd: &T, pkg: &Package) + -> CargoResult<CommandPrototype> { + self.process(CommandType::Target(cmd.as_os_str().to_os_string()), pkg) } /// See `process`. - pub fn host_process<T: BytesContainer>(&self, cmd: T, pkg: &Package) - -> CargoResult<CommandPrototype> { - let cmd = try!(CString::new(cmd.container_as_bytes())); - self.process(CommandType::Host(cmd), pkg) + pub fn host_process<T: AsOsStr + ?Sized>(&self, cmd: &T, pkg: &Package) + -> CargoResult<CommandPrototype> { + self.process(CommandType::Host(cmd.as_os_str().to_os_string()), pkg) } /// Prepares a new process with an appropriate environment to run against @@ -93,7 +91,7 @@ impl Compilation { /// well as the working directory of the child process. pub fn process(&self, cmd: CommandType, pkg: &Package) -> CargoResult<CommandPrototype> { - let mut search_path = DynamicLibrary::search_path(); + let mut search_path = util::dylib_path(); for dir in self.native_dirs.values() { search_path.push(dir.clone()); } @@ -101,30 +99,26 @@ impl Compilation { search_path.push(self.deps_output.clone()); let search_path = try!(util::join_paths(&search_path, DynamicLibrary::envvar())); - let mut cmd = try!(CommandPrototype::new(cmd)).env( - DynamicLibrary::envvar(), Some(&search_path)); + let mut cmd = try!(CommandPrototype::new(cmd)); + cmd.env(DynamicLibrary::envvar(), &search_path); for (k, v) in self.extra_env.iter() { - cmd = cmd.env(k, v.as_ref()); + cmd.env(k, v); } - Ok(cmd.env("CARGO_MANIFEST_DIR", Some(pkg.manifest_path().dir_path())) - .env("CARGO_PKG_VERSION_MAJOR", - Some(pkg.version().major.to_string())) - .env("CARGO_PKG_VERSION_MINOR", - Some(pkg.version().minor.to_string())) - .env("CARGO_PKG_VERSION_PATCH", - Some(pkg.version().patch.to_string())) - .env("CARGO_PKG_VERSION_PRE", - pre_version_component(pkg.version())) - .env("CARGO_PKG_VERSION", - Some(pkg.version().to_string())) - .cwd(pkg.root())) + cmd.env("CARGO_MANIFEST_DIR", pkg.root()) + .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string()) + .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string()) + .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string()) + .env("CARGO_PKG_VERSION_PRE", &pre_version_component(pkg.version())) + .env("CARGO_PKG_VERSION", &pkg.version().to_string()) + .cwd(pkg.root()); + Ok(cmd) } } -fn pre_version_component(v: &Version) -> Option<String> { +fn pre_version_component(v: &Version) -> String { if v.pre.is_empty() { - return None; + return String::new(); } let mut ret = String::new(); @@ -134,5 +128,5 @@ fn pre_version_component(v: &Version) -> Option<String> { ret.push_str(&x.to_string()); } - Some(ret) + ret } diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs index 1e02eb60a..e297ba405 100644 --- a/src/cargo/ops/cargo_rustc/context.rs +++ b/src/cargo/ops/cargo_rustc/context.rs @@ -2,6 +2,7 @@ use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::hash_map::HashMap; use 
std::str; use std::sync::Arc; +use std::path::PathBuf; use regex::Regex; @@ -90,20 +91,19 @@ impl<'a, 'b: 'a> Context<'a, 'b> { /// specified as well as the exe suffix fn filename_parts(target: Option<&str>) -> CargoResult<(Option<(String, String)>, String)> { - let process = try!(util::process("rustc")) - .arg("-") - .arg("--crate-name").arg("_") - .arg("--crate-type").arg("dylib") - .arg("--crate-type").arg("bin") - .arg("--print=file-names"); - let process = match target { - Some(s) => process.arg("--target").arg(s), - None => process, + let mut process = try!(util::process("rustc")); + process.arg("-") + .arg("--crate-name").arg("_") + .arg("--crate-type").arg("dylib") + .arg("--crate-type").arg("bin") + .arg("--print=file-names"); + if let Some(s) = target { + process.arg("--target").arg(s); }; let output = try!(process.exec_with_output()); - let error = str::from_utf8(&output.error).unwrap(); - let output = str::from_utf8(&output.output).unwrap(); + let error = str::from_utf8(&output.stderr).unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); let mut lines = output.lines(); let nodylib = Regex::new("unsupported crate type.*dylib").unwrap(); let nobin = Regex::new("unsupported crate type.*bin").unwrap(); @@ -152,11 +152,11 @@ impl<'a, 'b: 'a> Context<'a, 'b> { let jobs = self.jobs(); self.compilation.extra_env.insert("NUM_JOBS".to_string(), - Some(jobs.to_string())); + jobs.to_string()); self.compilation.root_output = - self.layout(pkg, Kind::Target).proxy().dest().clone(); + self.layout(pkg, Kind::Target).proxy().dest().to_path_buf(); self.compilation.deps_output = - self.layout(pkg, Kind::Target).proxy().deps().clone(); + self.layout(pkg, Kind::Target).proxy().deps().to_path_buf(); return Ok(()); } @@ -212,14 +212,14 @@ impl<'a, 'b: 'a> Context<'a, 'b> { /// Returns the appropriate output directory for the specified package and /// target. - pub fn out_dir(&self, pkg: &Package, kind: Kind, target: &Target) -> Path { + pub fn out_dir(&self, pkg: &Package, kind: Kind, target: &Target) -> PathBuf { let out_dir = self.layout(pkg, kind); if target.profile().is_custom_build() { out_dir.build(pkg) } else if target.is_example() { - out_dir.examples().clone() + out_dir.examples().to_path_buf() } else { - out_dir.root().clone() + out_dir.root().to_path_buf() } } diff --git a/src/cargo/ops/cargo_rustc/custom_build.rs b/src/cargo/ops/cargo_rustc/custom_build.rs index 2e270e5d8..60f936b16 100644 --- a/src/cargo/ops/cargo_rustc/custom_build.rs +++ b/src/cargo/ops/cargo_rustc/custom_build.rs @@ -1,7 +1,7 @@ use std::collections::HashMap; -use std::ffi::CString; -use std::old_io::fs::PathExtensions; -use std::old_io::{fs, USER_RWX, File}; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::PathBuf; use std::str; use std::sync::Mutex; @@ -18,7 +18,7 @@ use util::Freshness; #[derive(Clone, Debug)] pub struct BuildOutput { /// Paths to pass to rustc with the `-L` flag - pub library_paths: Vec<Path>, + pub library_paths: Vec<PathBuf>, /// Names and link kinds of libraries, suitable for the `-l` flag pub library_links: Vec<String>, /// Metadata to pass to the immediate dependencies @@ -47,7 +47,7 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, // Building the command to execute let to_exec = try!(cx.target_filenames(target))[0].clone(); - let to_exec = script_output.join(to_exec); + let to_exec = script_output.join(&to_exec); // Start preparing the process to execute, starting out with some // environment variables. 
Note that the profile-related environment @@ -57,29 +57,26 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, cx.is_relevant_target(t) && !t.profile().is_custom_build() }).unwrap_or(target); let profile = cx.profile(profile_target); - let to_exec = try!(CString::new(to_exec.as_vec())); - let p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx)); - let mut p = p.env("OUT_DIR", Some(&build_output)) - .env("CARGO_MANIFEST_DIR", Some(pkg.manifest_path() - .dir_path() - .display().to_string())) - .env("NUM_JOBS", Some(cx.jobs().to_string())) - .env("TARGET", Some(match kind { - Kind::Host => cx.config.rustc_host(), - Kind::Target => cx.target_triple(), - })) - .env("DEBUG", Some(profile.debug().to_string())) - .env("OPT_LEVEL", Some(profile.opt_level().to_string())) - .env("PROFILE", Some(profile.env())) - .env("HOST", Some(cx.config.rustc_host())); + let to_exec = to_exec.into_os_string(); + let mut p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx)); + p.env("OUT_DIR", &build_output) + .env("CARGO_MANIFEST_DIR", pkg.root()) + .env("NUM_JOBS", &cx.jobs().to_string()) + .env("TARGET", &match kind { + Kind::Host => cx.config.rustc_host(), + Kind::Target => cx.target_triple(), + }) + .env("DEBUG", &profile.debug().to_string()) + .env("OPT_LEVEL", &profile.opt_level().to_string()) + .env("PROFILE", &profile.env()) + .env("HOST", &cx.config.rustc_host()); // Be sure to pass along all enabled features for this package, this is the // last piece of statically known information that we have. match cx.resolve.features(pkg.package_id()) { Some(features) => { for feat in features.iter() { - p = p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), - Some("1")); + p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1"); } } None => {} @@ -107,8 +104,8 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, build_output.clone()); let plugin_deps = super::crawl_build_deps(cx, pkg, target, Kind::Host); - try!(fs::mkdir_recursive(&cx.layout(pkg, Kind::Target).build(pkg), USER_RWX)); - try!(fs::mkdir_recursive(&cx.layout(pkg, Kind::Host).build(pkg), USER_RWX)); + try!(fs::create_dir_all(&cx.layout(pkg, Kind::Target).build(pkg))); + try!(fs::create_dir_all(&cx.layout(pkg, Kind::Host).build(pkg))); let exec_engine = cx.exec_engine.clone(); @@ -123,7 +120,7 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, // If we have an old build directory, then just move it into place, // otherwise create it! if !build_output.exists() { - try!(fs::mkdir(&build_output, USER_RWX).chain_error(|| { + try!(fs::create_dir(&build_output).chain_error(|| { internal("failed to create script output directory for \ build command") })); @@ -133,18 +130,16 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, // along to this custom build command. We're also careful to augment our // dynamic library search path in case the build script depended on any // native dynamic libraries. - let mut p = p; { let build_state = build_state.outputs.lock().unwrap(); for &(ref name, ref id) in lib_deps.iter() { let data = &build_state[(id.clone(), kind)].metadata; for &(ref key, ref value) in data.iter() { - p = p.env(&format!("DEP_{}_{}", super::envify(name), - super::envify(key)), - Some(value)); + p.env(&format!("DEP_{}_{}", super::envify(name), + super::envify(key)), value); } } - p = try!(super::add_plugin_deps(p, &build_state, plugin_deps)); + try!(super::add_plugin_deps(&mut p, &build_state, plugin_deps)); } // And now finally, run the build command itself! 
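Note on the API change running through the hunk above: `CommandPrototype`/`ProcessBuilder` methods such as `env` and `arg` now borrow the builder mutably and return `&mut Self`, which is why the repeated `p = p.env(..)` rebinding disappears. A minimal, self-contained sketch of that pattern; the `Proto` type below is a hypothetical stand-in, not the real `CommandPrototype`:

```
use std::collections::HashMap;

// Hypothetical stand-in for the mutable-builder style adopted in this diff.
struct Proto {
    vars: HashMap<String, String>,
}

impl Proto {
    // Takes &mut self and returns &mut Self, so calls chain without
    // consuming and rebinding the builder.
    fn env(&mut self, key: &str, val: &str) -> &mut Proto {
        self.vars.insert(key.to_string(), val.to_string());
        self
    }
}

fn configure(p: &mut Proto, jobs: u32, profile: &str) {
    p.env("NUM_JOBS", &jobs.to_string())
     .env("PROFILE", profile);
}
```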
@@ -162,14 +157,15 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, // This is also the location where we provide feedback into the build // state informing what variables were discovered via our script as // well. - let output = try!(str::from_utf8(&output.output).chain_error(|| { + let output = try!(str::from_utf8(&output.stdout).chain_error(|| { human("build script output was not valid utf-8") })); let parsed_output = try!(BuildOutput::parse(output, &pkg_name)); build_state.insert(id, req, parsed_output); - try!(File::create(&build_output.dir_path().join("output")) - .write_str(output).map_err(|e| { + try!(File::create(&build_output.parent().unwrap().join("output")) + .and_then(|mut f| f.write_all(output.as_bytes())) + .map_err(|e| { human(format!("failed to write output of custom build command: {}", e)) })); @@ -187,18 +183,19 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, // // Also note that a fresh build command needs to let (freshness, dirty, fresh) = - try!(fingerprint::prepare_build_cmd(cx, pkg, kind, Some(target))); + try!(fingerprint::prepare_build_cmd(cx, pkg, kind)); let dirty = Work::new(move |tx| { try!(work.call((tx.clone()))); dirty.call(tx) }); let fresh = Work::new(move |tx| { let (id, pkg_name, build_state, build_output) = all; - let new_loc = build_output.dir_path().join("output"); + let new_loc = build_output.parent().unwrap().join("output"); let mut f = try!(File::open(&new_loc).map_err(|e| { human(format!("failed to read cached build command output: {}", e)) })); - let contents = try!(f.read_to_string()); + let mut contents = String::new(); + try!(f.read_to_string(&mut contents)); let output = try!(BuildOutput::parse(&contents, &pkg_name)); build_state.insert(id, req, output); @@ -303,7 +300,7 @@ impl BuildOutput { } pub fn parse_rustc_flags(value: &str, whence: &str) - -> CargoResult<(Vec, Vec)> { + -> CargoResult<(Vec, Vec)> { // TODO: some arguments (like paths) may contain spaces let value = value.trim(); let mut flags_iter = value.words(); @@ -326,7 +323,7 @@ impl BuildOutput { }; match flag { "-l" => library_links.push(value.to_string()), - "-L" => library_paths.push(Path::new(value)), + "-L" => library_paths.push(PathBuf::new(value)), // was already checked above _ => return Err(human("only -l and -L flags are allowed")) diff --git a/src/cargo/ops/cargo_rustc/engine.rs b/src/cargo/ops/cargo_rustc/engine.rs index 580084b9d..124a56dce 100644 --- a/src/cargo/ops/cargo_rustc/engine.rs +++ b/src/cargo/ops/cargo_rustc/engine.rs @@ -1,16 +1,15 @@ use std::collections::HashMap; -use std::env; -use std::ffi::CString; -use std::fmt::{self, Formatter}; -use std::old_io::process::ProcessOutput; -use std::old_path::BytesContainer; +use std::ffi::{AsOsStr, OsString}; +use std::fmt; +use std::path::Path; +use std::process::Output; -use util::{self, CargoResult, ProcessError, ProcessBuilder}; +use util::{CargoResult, ProcessError, ProcessBuilder, process}; /// Trait for objects that can execute commands. pub trait ExecEngine: Send + Sync { fn exec(&self, CommandPrototype) -> Result<(), ProcessError>; - fn exec_with_output(&self, CommandPrototype) -> Result; + fn exec_with_output(&self, CommandPrototype) -> Result; } /// Default implementation of `ExecEngine`. 
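The engine hunks below rewrite command execution around `std::process`. Since `ExecEngine` and `CommandPrototype` are Cargo-internal types, here is a hedged, self-contained analogue of the new trait shape, with `std::process::Command` standing in for `CommandPrototype` and a purely hypothetical logging engine:

```
use std::io;
use std::process::{Command, Output};

// Simplified analogue of ExecEngine after this change: results are
// std::process::Output rather than the old_io ProcessOutput.
trait ExecEngine {
    fn exec_with_output(&self, cmd: &mut Command) -> io::Result<Output>;
}

// Hypothetical engine that logs the command before running it.
struct LoggingEngine;

impl ExecEngine for LoggingEngine {
    fn exec_with_output(&self, cmd: &mut Command) -> io::Result<Output> {
        println!("running: {:?}", cmd);
        cmd.output()
    }
}
```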
@@ -19,12 +18,12 @@ pub struct ProcessEngine; impl ExecEngine for ProcessEngine { fn exec(&self, command: CommandPrototype) -> Result<(), ProcessError> { - command.into_process_builder().unwrap().exec() + command.into_process_builder().exec() } fn exec_with_output(&self, command: CommandPrototype) - -> Result { - command.into_process_builder().unwrap().exec_with_output() + -> Result { + command.into_process_builder().exec_with_output() } } @@ -32,104 +31,64 @@ impl ExecEngine for ProcessEngine { #[derive(Clone)] pub struct CommandPrototype { ty: CommandType, - args: Vec, - env: HashMap>, - cwd: Path, + builder: ProcessBuilder, } impl CommandPrototype { pub fn new(ty: CommandType) -> CargoResult { Ok(CommandPrototype { + builder: try!(match ty { + CommandType::Rustc => process("rustc"), + CommandType::Rustdoc => process("rustdoc"), + CommandType::Target(ref s) | + CommandType::Host(ref s) => process(s), + }), ty: ty, - args: Vec::new(), - env: HashMap::new(), - cwd: try!(env::current_dir()), }) } - pub fn get_type(&self) -> &CommandType { - &self.ty - } + pub fn get_type(&self) -> &CommandType { &self.ty } - pub fn arg(mut self, arg: T) -> CommandPrototype { - self.args.push(CString::new(arg.container_as_bytes()).unwrap()); + pub fn arg(&mut self, arg: &T) -> &mut CommandPrototype { + self.builder.arg(arg); self } - pub fn args(mut self, arguments: &[T]) -> CommandPrototype { - self.args.extend(arguments.iter().map(|t| { - CString::new(t.container_as_bytes()).unwrap() - })); + pub fn args(&mut self, arguments: &[T]) -> &mut CommandPrototype { + self.builder.args(arguments); self } - pub fn get_args(&self) -> &[CString] { - &self.args - } - - pub fn cwd(mut self, path: Path) -> CommandPrototype { - self.cwd = path; + pub fn cwd(&mut self, path: &T) -> &mut CommandPrototype { + self.builder.cwd(path); self } - pub fn get_cwd(&self) -> &Path { - &self.cwd - } - - pub fn env(mut self, key: &str, - val: Option) -> CommandPrototype { - let val = val.map(|t| CString::new(t.container_as_bytes()).unwrap()); - self.env.insert(key.to_string(), val); + pub fn env(&mut self, key: &str, val: &T) + -> &mut CommandPrototype { + self.builder.env(key, val); self } - pub fn get_env(&self, var: &str) -> Option { - self.env.get(var).cloned().or_else(|| { - Some(env::var(var).ok().map(|s| CString::new(s).unwrap())) - }).and_then(|val| val) + pub fn get_args(&self) -> &[OsString] { self.builder.get_args() } + pub fn get_cwd(&self) -> &Path { self.builder.get_cwd() } + + pub fn get_env(&self, var: &str) -> Option { + self.builder.get_env(var) } - pub fn get_envs(&self) -> &HashMap> { - &self.env + pub fn get_envs(&self) -> &HashMap> { + self.builder.get_envs() } - pub fn into_process_builder(self) -> CargoResult { - let mut builder = try!(match self.ty { - CommandType::Rustc => util::process("rustc"), - CommandType::Rustdoc => util::process("rustdoc"), - CommandType::Target(ref cmd) | CommandType::Host(ref cmd) => { - util::process(cmd) - }, - }); - - for arg in self.args.into_iter() { - builder = builder.arg(arg); - } - for (key, val) in self.env.into_iter() { - builder = builder.env(&key, val.as_ref()); - } - - builder = builder.cwd(self.cwd); - - Ok(builder) + pub fn into_process_builder(self) -> ProcessBuilder { + self.builder } } impl fmt::Display for CommandPrototype { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.ty { - CommandType::Rustc => try!(write!(f, "`rustc")), - CommandType::Rustdoc => try!(write!(f, "`rustdoc")), - CommandType::Target(ref cmd) | CommandType::Host(ref cmd) => { 
- try!(write!(f, "`{}", String::from_utf8_lossy(cmd.as_bytes()))); - }, - } - - for arg in self.args.iter() { - try!(write!(f, " {}", String::from_utf8_lossy(arg.as_bytes()))); - } - - write!(f, "`") + self.builder.fmt(f) } } @@ -139,8 +98,8 @@ pub enum CommandType { Rustdoc, /// The command is to be executed for the target architecture. - Target(CString), + Target(OsString), /// The command is to be executed for the host architecture. - Host(CString), + Host(OsString), } diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs index 1a62d0161..cc336e114 100644 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -1,6 +1,8 @@ use std::collections::hash_map::Entry::{Occupied, Vacant}; -use std::old_io::{self, fs, File, BufferedReader}; -use std::old_io::fs::PathExtensions; +use std::fs::{self, File, OpenOptions}; +use std::io::prelude::*; +use std::io::{BufReader, SeekFrom}; +use std::path::{Path, PathBuf}; use core::{Package, Target}; use util; @@ -45,7 +47,7 @@ pub fn prepare_target<'a, 'b>(cx: &mut Context<'a, 'b>, let _p = profile::start(format!("fingerprint: {} / {:?}", pkg.package_id(), target)); let new = dir(cx, pkg, kind); - let loc = new.join(filename(target)); + let loc = new.join(&filename(target)); info!("fingerprint at: {}", loc.display()); @@ -107,7 +109,7 @@ pub struct Fingerprint { #[derive(Clone)] enum LocalFingerprint { Precalculated(String), - MtimeBased(Option, Path), + MtimeBased(Option, PathBuf), } impl Fingerprint { @@ -121,7 +123,7 @@ impl Fingerprint { LocalFingerprint::MtimeBased(Some(n), _) if !force => n.to_string(), LocalFingerprint::MtimeBased(_, ref p) => { debug!("resolving: {}", p.display()); - try!(fs::stat(p)).modified.to_string() + try!(fs::metadata(p)).modified().to_string() } }; debug!("inputs: {} {} {:?}", known, self.extra, deps); @@ -182,7 +184,7 @@ fn calculate<'a, 'b>(cx: &mut Context<'a, 'b>, // if the mtime listed is not fresh, then remove the `dep_info` file to // ensure that future calls to `resolve()` won't work. if mtime.is_none() { - let _ = fs::unlink(&dep_info); + let _ = fs::remove_file(&dep_info); } LocalFingerprint::MtimeBased(mtime, dep_info) } else { @@ -225,12 +227,8 @@ fn use_dep_info(pkg: &Package, target: &Target) -> bool { /// /// The currently implemented solution is option (1), although it is planned to /// migrate to option (2) in the near future. -pub fn prepare_build_cmd(cx: &mut Context, pkg: &Package, kind: Kind, - target: Option<&Target>) -> CargoResult { - if target.is_none() { - return Ok((Fresh, Work::noop(), Work::noop())); - } - +pub fn prepare_build_cmd(cx: &mut Context, pkg: &Package, kind: Kind) + -> CargoResult { let _p = profile::start(format!("fingerprint build cmd: {}", pkg.package_id())); let new = dir(cx, pkg, kind); @@ -247,14 +245,6 @@ pub fn prepare_build_cmd(cx: &mut Context, pkg: &Package, kind: Kind, let is_fresh = try!(is_fresh(&loc, &new_fingerprint)); - // The new custom build command infrastructure handles its own output - // directory as part of freshness. 
- if target.is_none() { - let native_dir = cx.layout(pkg, kind).native(pkg); - cx.compilation.native_dirs.insert(pkg.package_id().clone(), - native_dir); - } - Ok(prepare(is_fresh, loc, new_fingerprint)) } @@ -266,13 +256,13 @@ pub fn prepare_init(cx: &mut Context, pkg: &Package, kind: Kind) let work1 = Work::new(move |_| { if !new1.exists() { - try!(fs::mkdir(&new1, old_io::USER_DIR)); + try!(fs::create_dir(&new1)); } Ok(()) }); let work2 = Work::new(move |_| { if !new2.exists() { - try!(fs::mkdir(&new2, old_io::USER_DIR)); + try!(fs::create_dir(&new2)); } Ok(()) }); @@ -282,13 +272,14 @@ pub fn prepare_init(cx: &mut Context, pkg: &Package, kind: Kind) /// Given the data to build and write a fingerprint, generate some Work /// instances to actually perform the necessary work. -fn prepare(is_fresh: bool, loc: Path, fingerprint: Fingerprint) -> Preparation { +fn prepare(is_fresh: bool, loc: PathBuf, fingerprint: Fingerprint) -> Preparation { let write_fingerprint = Work::new(move |_| { debug!("write fingerprint: {}", loc.display()); let fingerprint = try!(fingerprint.resolve(true).chain_error(|| { internal("failed to resolve a pending fingerprint") })); - try!(File::create(&loc).write_str(&fingerprint)); + let mut f = try!(File::create(&loc)); + try!(f.write_all(fingerprint.as_bytes())); Ok(()) }); @@ -296,14 +287,14 @@ fn prepare(is_fresh: bool, loc: Path, fingerprint: Fingerprint) -> Preparation { } /// Return the (old, new) location for fingerprints for a package -pub fn dir(cx: &Context, pkg: &Package, kind: Kind) -> Path { +pub fn dir(cx: &Context, pkg: &Package, kind: Kind) -> PathBuf { cx.layout(pkg, kind).proxy().fingerprint(pkg) } /// Returns the (old, new) location for the dep info file of a target. pub fn dep_info_loc(cx: &Context, pkg: &Package, target: &Target, - kind: Kind) -> Path { - dir(cx, pkg, kind).join(format!("dep-{}", filename(target))) + kind: Kind) -> PathBuf { + dir(cx, pkg, kind).join(&format!("dep-{}", filename(target))) } fn is_fresh(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult { @@ -312,7 +303,8 @@ fn is_fresh(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult { Err(..) => return Ok(false), }; - let old_fingerprint = try!(file.read_to_string()); + let mut old_fingerprint = String::new(); + try!(file.read_to_string(&mut old_fingerprint)); let new_fingerprint = match new_fingerprint.resolve(false) { Ok(s) => s, Err(..) => return Ok(false), @@ -328,16 +320,17 @@ fn calculate_target_mtime(dep_info: &Path) -> CargoResult> { macro_rules! fs_try { ($e:expr) => (match $e { Ok(e) => e, Err(..) => return Ok(None) }) } - let mut f = BufferedReader::new(fs_try!(File::open(dep_info))); + let mut f = BufReader::new(fs_try!(File::open(dep_info))); // see comments in append_current_dir for where this cwd is manifested from. 
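The comment above refers to the cwd that `append_current_dir` (later in this file) writes at the front of the dep-info file, terminated by a NUL byte. A hedged sketch of reading that prefix back with the new `std::io` types; the helper name is made up for illustration, and `?` is used where the original uses `try!`:

```
use std::fs::File;
use std::io::{self, BufRead, BufReader};
use std::path::Path;

// Read the cwd prefix written by append_current_dir: everything up to the
// first NUL byte, with the NUL itself stripped off.
fn read_dep_info_cwd(dep_info: &Path) -> io::Result<Vec<u8>> {
    let mut reader = BufReader::new(File::open(dep_info)?);
    let mut cwd = Vec::new();
    reader.read_until(0, &mut cwd)?;
    cwd.pop(); // drop the trailing NUL separator
    Ok(cwd)
}
```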
- let cwd = fs_try!(f.read_until(0)); - let cwd = Path::new(&cwd[..cwd.len()-1]); + let mut cwd = Vec::new(); + fs_try!(f.read_until(0, &mut cwd)); + let cwd = try!(util::bytes2path(&cwd[..cwd.len()-1])); let line = match f.lines().next() { Some(Ok(line)) => line, _ => return Ok(None), }; - let mtime = try!(fs::stat(dep_info)).modified; - let pos = try!(line.find_str(": ").chain_error(|| { + let mtime = try!(fs::metadata(dep_info)).modified(); + let pos = try!(line.find(": ").chain_error(|| { internal(format!("dep-info not in an understood format: {}", dep_info.display())) })); @@ -354,10 +347,10 @@ fn calculate_target_mtime(dep_info: &Path) -> CargoResult> { file.push(' '); file.push_str(deps.next().unwrap()) } - match fs::stat(&cwd.join(&file)) { - Ok(stat) if stat.modified <= mtime => {} - Ok(stat) => { - info!("stale: {} -- {} vs {}", file, stat.modified, mtime); + match fs::metadata(&cwd.join(&file)) { + Ok(ref stat) if stat.modified() <= mtime => {} + Ok(ref stat) => { + info!("stale: {} -- {} vs {}", file, stat.modified(), mtime); return Ok(None) } _ => { info!("stale: {} -- missing", file); return Ok(None) } @@ -401,10 +394,11 @@ fn filename(target: &Target) -> String { // next time. pub fn append_current_dir(path: &Path, cwd: &Path) -> CargoResult<()> { debug!("appending {} <- {}", path.display(), cwd.display()); - let mut f = try!(File::open_mode(path, old_io::Open, old_io::ReadWrite)); - let contents = try!(f.read_to_end()); - try!(f.seek(0, old_io::SeekSet)); - try!(f.write_all(cwd.as_vec())); + let mut f = try!(OpenOptions::new().read(true).write(true).open(path)); + let mut contents = Vec::new(); + try!(f.read_to_end(&mut contents)); + try!(f.seek(SeekFrom::Start(0))); + try!(f.write_all(try!(util::path2bytes(cwd)))); try!(f.write_all(&[0])); try!(f.write_all(&contents)); Ok(()) diff --git a/src/cargo/ops/cargo_rustc/job_queue.rs b/src/cargo/ops/cargo_rustc/job_queue.rs index a83887d45..7b042691b 100644 --- a/src/cargo/ops/cargo_rustc/job_queue.rs +++ b/src/cargo/ops/cargo_rustc/job_queue.rs @@ -1,8 +1,9 @@ use std::collections::HashSet; use std::collections::hash_map::HashMap; use std::collections::hash_map::Entry::{Occupied, Vacant}; -use std::sync::TaskPool; use std::sync::mpsc::{channel, Sender, Receiver}; + +use threadpool::ThreadPool; use term::color::YELLOW; use core::{Package, PackageId, Resolve, PackageSet}; @@ -17,7 +18,7 @@ use super::job::Job; /// actual compilation step of each package. Packages enqueue units of work and /// then later on the entire graph is processed and compiled. pub struct JobQueue<'a> { - pool: TaskPool, + pool: ThreadPool, queue: DependencyQueue<(&'a PackageId, Stage), (&'a Package, Vec<(Job, Freshness)>)>, tx: Sender, @@ -67,7 +68,7 @@ impl<'a> JobQueue<'a> { -> JobQueue<'a> { let (tx, rx) = channel(); JobQueue { - pool: TaskPool::new(jobs as usize), + pool: ThreadPool::new(jobs as usize), queue: DependencyQueue::new(), tx: tx, rx: rx, diff --git a/src/cargo/ops/cargo_rustc/layout.rs b/src/cargo/ops/cargo_rustc/layout.rs index 6f5f68f72..5aed2effe 100644 --- a/src/cargo/ops/cargo_rustc/layout.rs +++ b/src/cargo/ops/cargo_rustc/layout.rs @@ -45,19 +45,21 @@ //! .fingerprint/ //! 
``` -use std::old_io::fs::PathExtensions; -use std::old_io::{self, fs, IoResult}; +use std::fs; +use std::io::prelude::*; +use std::io; +use std::path::{PathBuf, Path}; use core::Package; use util::hex::short_hash; pub struct Layout { - root: Path, - deps: Path, - native: Path, - build: Path, - fingerprint: Path, - examples: Path, + root: PathBuf, + deps: PathBuf, + native: PathBuf, + build: PathBuf, + fingerprint: PathBuf, + examples: PathBuf, } pub struct LayoutProxy<'a> { @@ -79,7 +81,7 @@ impl Layout { Layout::at(path) } - pub fn at(root: Path) -> Layout { + pub fn at(root: PathBuf) -> Layout { Layout { deps: root.join("deps"), native: root.join("native"), @@ -90,9 +92,9 @@ impl Layout { } } - pub fn prepare(&mut self) -> IoResult<()> { + pub fn prepare(&mut self) -> io::Result<()> { if !self.root.exists() { - try!(fs::mkdir_recursive(&self.root, old_io::USER_RWX)); + try!(fs::create_dir_all(&self.root)); } try!(mkdir(&self.deps)); @@ -103,9 +105,9 @@ impl Layout { return Ok(()); - fn mkdir(dir: &Path) -> IoResult<()> { + fn mkdir(dir: &Path) -> io::Result<()> { if !dir.exists() { - try!(fs::mkdir(dir, old_io::USER_DIR)); + try!(fs::create_dir(dir)); } Ok(()) } @@ -115,19 +117,15 @@ impl Layout { pub fn deps<'a>(&'a self) -> &'a Path { &self.deps } pub fn examples<'a>(&'a self) -> &'a Path { &self.examples } - // TODO: deprecated, remove - pub fn native(&self, package: &Package) -> Path { - self.native.join(self.pkg_dir(package)) - } - pub fn fingerprint(&self, package: &Package) -> Path { - self.fingerprint.join(self.pkg_dir(package)) + pub fn fingerprint(&self, package: &Package) -> PathBuf { + self.fingerprint.join(&self.pkg_dir(package)) } - pub fn build(&self, package: &Package) -> Path { - self.build.join(self.pkg_dir(package)) + pub fn build(&self, package: &Package) -> PathBuf { + self.build.join(&self.pkg_dir(package)) } - pub fn build_out(&self, package: &Package) -> Path { + pub fn build_out(&self, package: &Package) -> PathBuf { self.build(package).join("out") } @@ -151,12 +149,9 @@ impl<'a> LayoutProxy<'a> { pub fn examples(&self) -> &'a Path { self.root.examples() } - // TODO: deprecated, remove - pub fn native(&self, pkg: &Package) -> Path { self.root.native(pkg) } + pub fn build(&self, pkg: &Package) -> PathBuf { self.root.build(pkg) } - pub fn build(&self, pkg: &Package) -> Path { self.root.build(pkg) } - - pub fn build_out(&self, pkg: &Package) -> Path { self.root.build_out(pkg) } + pub fn build_out(&self, pkg: &Package) -> PathBuf { self.root.build_out(pkg) } pub fn proxy(&self) -> &'a Layout { self.root } } diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs index 60711242a..3608429ab 100644 --- a/src/cargo/ops/cargo_rustc/mod.rs +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -1,9 +1,10 @@ use std::collections::{HashSet, HashMap}; use std::dynamic_lib::DynamicLibrary; -use std::ffi::CString; -use std::old_io::fs::{self, PathExtensions}; -use std::os; -use std::old_path; +use std::env; +use std::ffi::{OsStr, AsOsStr, OsString}; +use std::fs; +use std::io::prelude::*; +use std::path::{self, PathBuf}; use std::sync::Arc; use core::{SourceMap, Package, PackageId, PackageSet, Target, Resolve}; @@ -53,34 +54,10 @@ pub struct TargetConfig { /// The second element of the tuple returned is the target triple that rustc /// is a host for. 
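With the old `rustc -v verbose` fallback removed below, only the `-vV` parse survives. A self-contained, hedged sketch of that host-triple extraction, with error handling collapsed to `Option` for brevity:

```
use std::process::Command;

// Run `rustc -vV` and pull the triple out of the `host: ...` line.
fn host_triple() -> Option<String> {
    let out = Command::new("rustc").arg("-vV").output().ok()?;
    let stdout = String::from_utf8(out.stdout).ok()?;
    stdout.lines()
          .find(|l| l.starts_with("host: "))
          .map(|l| l["host: ".len()..].to_string())
}
```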
pub fn rustc_version() -> CargoResult<(String, String)> { - rustc_new_version().or_else(|_| rustc_old_version()) -} - -pub fn rustc_old_version() -> CargoResult<(String, String)> { - let output = try!(try!(util::process("rustc")) - .arg("-v") - .arg("verbose") - .exec_with_output()); - let output = try!(String::from_utf8(output.output).map_err(|_| { - internal("rustc -v didn't return utf8 output") - })); - let triple = { - let triple = output.lines().filter(|l| { - l.starts_with("host: ") - }).map(|l| &l[6..]).next(); - let triple = try!(triple.chain_error(|| { - internal("rustc -v didn't have a line for `host:`") - })); - triple.to_string() - }; - Ok((output, triple)) -} - -pub fn rustc_new_version() -> CargoResult<(String, String)> { let output = try!(try!(util::process("rustc")) .arg("-vV") .exec_with_output()); - let output = try!(String::from_utf8(output.output).map_err(|_| { + let output = try!(String::from_utf8(output.stdout).map_err(|_| { internal("rustc -v didn't return utf8 output") })); let triple = { @@ -188,7 +165,7 @@ pub fn compile_targets<'a, 'b>(env: &str, let out_dir = cx.layout(pkg, Kind::Target).build_out(pkg) .display().to_string(); - cx.compilation.extra_env.insert("OUT_DIR".to_string(), Some(out_dir)); + cx.compilation.extra_env.insert("OUT_DIR".to_string(), out_dir); if let Some(feats) = cx.resolve.features(pkg.package_id()) { cx.compilation.features.extend(feats.iter().cloned()); @@ -340,12 +317,14 @@ fn rustc(package: &Package, target: &Target, let plugin_deps = crawl_build_deps(cx, package, target, Kind::Host); - return rustcs.into_iter().map(|(rustc, kind)| { + return rustcs.into_iter().map(|(mut rustc, kind)| { let name = package.name().to_string(); let is_path_source = package.package_id().source_id().is_path(); let show_warnings = package.package_id() == cx.resolve.root() || is_path_source; - let rustc = if show_warnings {rustc} else {rustc.arg("-Awarnings")}; + if !show_warnings { + rustc.arg("-Awarnings"); + } let exec_engine = cx.exec_engine.clone(); let filenames = try!(cx.target_filenames(target)); @@ -366,12 +345,12 @@ fn rustc(package: &Package, target: &Target, t.is_lib() }); - let rustc_dep_info_loc = root.join(target.file_stem()).with_extension("d"); + let rustc_dep_info_loc = root.join(&target.file_stem()) + .with_extension("d"); let dep_info_loc = fingerprint::dep_info_loc(cx, package, target, kind); - let cwd = cx.config.cwd().clone(); + let cwd = cx.config.cwd().to_path_buf(); Ok((Work::new(move |desc_tx| { - let mut rustc = rustc; debug!("about to run: {}", rustc); // Only at runtime have we discovered what the extra -L and -l @@ -380,9 +359,9 @@ fn rustc(package: &Package, target: &Target, // dynamic library load path as a plugin's dynamic library may be // located somewhere in there. 
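Both this closure and `add_plugin_deps` below now go through `env::split_paths` / `join_paths` on `OsString` values instead of byte vectors. A hedged, standalone sketch of that round trip; the variable name is passed in here, whereas the real code asks `DynamicLibrary::envvar()` and `util::dylib_path()`:

```
use std::env;
use std::ffi::OsString;
use std::path::PathBuf;

// Read an existing PATH-like variable, append extra directories, and
// re-join it into a single OsString suitable for passing to env().
fn extend_search_path(var: &str, extra: &[PathBuf]) -> OsString {
    let mut paths: Vec<PathBuf> = env::var_os(var)
        .map(|v| env::split_paths(&v).collect())
        .unwrap_or_default();
    paths.extend(extra.iter().cloned());
    env::join_paths(&paths).expect("search path contained an invalid character")
}
```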
let build_state = build_state.outputs.lock().unwrap(); - rustc = add_native_deps(rustc, &*build_state, native_lib_deps, - kind, pass_l_flag, ¤t_id); - rustc = try!(add_plugin_deps(rustc, &*build_state, plugin_deps)); + add_native_deps(&mut rustc, &*build_state, native_lib_deps, + kind, pass_l_flag, ¤t_id); + try!(add_plugin_deps(&mut rustc, &*build_state, plugin_deps)); drop(build_state); // FIXME(rust-lang/rust#18913): we probably shouldn't have to do @@ -390,7 +369,7 @@ fn rustc(package: &Package, target: &Target, for filename in filenames.iter() { let dst = root.join(filename); if dst.exists() { - try!(fs::unlink(&dst)); + try!(fs::remove_file(&dst)); } } @@ -409,25 +388,24 @@ fn rustc(package: &Package, target: &Target, // Add all relevant -L and -l flags from dependencies (now calculated and // present in `state`) to the command provided - fn add_native_deps(mut rustc: CommandPrototype, + fn add_native_deps(rustc: &mut CommandPrototype, build_state: &BuildMap, native_lib_deps: Vec, kind: Kind, pass_l_flag: bool, - current_id: &PackageId) -> CommandPrototype { + current_id: &PackageId) { for id in native_lib_deps.into_iter() { debug!("looking up {} {:?}", id, kind); let output = &build_state[(id.clone(), kind)]; for path in output.library_paths.iter() { - rustc = rustc.arg("-L").arg(path); + rustc.arg("-L").arg(path); } if pass_l_flag && id == *current_id { for name in output.library_links.iter() { - rustc = rustc.arg("-l").arg(name); + rustc.arg("-l").arg(name); } } } - return rustc; } } @@ -460,15 +438,13 @@ fn crawl_build_deps<'a>(cx: &'a Context, pkg: &'a Package, // For all plugin dependencies, add their -L paths (now calculated and // present in `state`) to the dynamic library load path for the command to // execute. -#[allow(deprecated)] // need an OsStr based Command -fn add_plugin_deps(rustc: CommandPrototype, +fn add_plugin_deps(rustc: &mut CommandPrototype, build_state: &BuildMap, plugin_deps: Vec) - -> CargoResult { + -> CargoResult<()> { let var = DynamicLibrary::envvar(); - let search_path = rustc.get_env(var) - .unwrap_or(CString::from_slice(b"")); - let mut search_path = os::split_paths(search_path); + let search_path = rustc.get_env(var).unwrap_or(OsString::new()); + let mut search_path = env::split_paths(&search_path).collect::>(); for id in plugin_deps.into_iter() { let output = &build_state[(id, Kind::Host)]; for path in output.library_paths.iter() { @@ -476,21 +452,22 @@ fn add_plugin_deps(rustc: CommandPrototype, } } let search_path = try!(join_paths(&search_path, var)); - Ok(rustc.env(var, Some(search_path))) + rustc.env(var, &search_path); + Ok(()) } fn prepare_rustc(package: &Package, target: &Target, crate_types: Vec<&str>, cx: &Context, req: Platform) -> CargoResult> { - let base = try!(process(CommandType::Rustc, package, target, cx)); - let base = build_base_args(cx, base, package, target, &crate_types); + let mut base = try!(process(CommandType::Rustc, package, target, cx)); + build_base_args(cx, &mut base, package, target, &crate_types); - let target_cmd = build_plugin_args(base.clone(), cx, package, target, Kind::Target); - let plugin_cmd = build_plugin_args(base, cx, package, target, Kind::Host); - let target_cmd = try!(build_deps_args(target_cmd, target, package, cx, - Kind::Target)); - let plugin_cmd = try!(build_deps_args(plugin_cmd, target, package, cx, - Kind::Host)); + let mut target_cmd = base.clone(); + let mut plugin_cmd = base; + build_plugin_args(&mut target_cmd, cx, package, target, Kind::Target); + build_plugin_args(&mut plugin_cmd, cx, 
package, target, Kind::Host); + try!(build_deps_args(&mut target_cmd, target, package, cx, Kind::Target)); + try!(build_deps_args(&mut plugin_cmd, target, package, cx, Kind::Host)); Ok(match req { Platform::Target => vec![(target_cmd, Kind::Target)], @@ -507,28 +484,26 @@ fn rustdoc(package: &Package, target: &Target, cx: &mut Context) -> CargoResult { let kind = Kind::Target; let cx_root = cx.layout(package, kind).proxy().dest().join("doc"); - let rustdoc = try!(process(CommandType::Rustdoc, package, target, cx)); - let mut rustdoc = rustdoc.arg(root_path(cx, package, target)) - .cwd(cx.config.cwd().clone()) - .arg("-o").arg(cx_root) - .arg("--crate-name").arg(target.name()); + let mut rustdoc = try!(process(CommandType::Rustdoc, package, target, cx)); + rustdoc.arg(&root_path(cx, package, target)) + .cwd(cx.config.cwd()) + .arg("-o").arg(&cx_root) + .arg("--crate-name").arg(target.name()); match cx.resolve.features(package.package_id()) { Some(features) => { - for feat in features.iter() { - rustdoc = rustdoc.arg("--cfg").arg(format!("feature=\"{}\"", feat)); + for feat in features { + rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } } None => {} } - let mut rustdoc = try!(build_deps_args(rustdoc, target, package, cx, kind)); + try!(build_deps_args(&mut rustdoc, target, package, cx, kind)); - rustdoc = rustdoc.env("OUT_DIR", if package.has_custom_build() { - Some(cx.layout(package, kind).build_out(package)) - } else { - None - }); + if package.has_custom_build() { + rustdoc.env("OUT_DIR", &cx.layout(package, kind).build_out(package)); + } trace!("commands={}", rustdoc); @@ -569,33 +544,33 @@ fn rustdoc(package: &Package, target: &Target, // path is only actually relative if the current directory is an ancestor if it. // This means that non-path dependencies (git/registry) will likely be shown as // absolute paths instead of relative paths. 
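The comment above, and `root_path` just below, describe when a source path is displayed relative to the cwd. `Path::relative_from` was unstable at the time; a hedged sketch of the same decision using the stable `strip_prefix` equivalent:

```
use std::path::{Path, PathBuf};

// Show the path relative to cwd when cwd is an ancestor, otherwise keep it
// absolute -- the same choice root_path() makes.
fn display_path(absolute: &Path, cwd: &Path) -> PathBuf {
    match absolute.strip_prefix(cwd) {
        Ok(rel) => rel.to_path_buf(),
        Err(_) => absolute.to_path_buf(),
    }
}
```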
-fn root_path(cx: &Context, pkg: &Package, target: &Target) -> Path { +fn root_path(cx: &Context, pkg: &Package, target: &Target) -> PathBuf { let absolute = pkg.root().join(target.src_path()); let cwd = cx.config.cwd(); - if cwd.is_ancestor_of(&absolute) { - absolute.path_relative_from(cwd).unwrap_or(absolute) + if absolute.starts_with(cwd) { + absolute.relative_from(cwd).map(|s| s.to_path_buf()).unwrap_or(absolute) } else { absolute } } fn build_base_args(cx: &Context, - mut cmd: CommandPrototype, + cmd: &mut CommandPrototype, pkg: &Package, target: &Target, - crate_types: &[&str]) -> CommandPrototype { + crate_types: &[&str]) { let metadata = target.metadata(); // Move to cwd so the root_path() passed below is actually correct - cmd = cmd.cwd(cx.config.cwd().clone()); + cmd.cwd(cx.config.cwd()); // TODO: Handle errors in converting paths into args - cmd = cmd.arg(root_path(cx, pkg, target)); + cmd.arg(&root_path(cx, pkg, target)); - cmd = cmd.arg("--crate-name").arg(target.name()); + cmd.arg("--crate-name").arg(target.name()); for crate_type in crate_types.iter() { - cmd = cmd.arg("--crate-type").arg(*crate_type); + cmd.arg("--crate-type").arg(crate_type); } // Despite whatever this target's profile says, we need to configure it @@ -606,37 +581,37 @@ fn build_base_args(cx: &Context, (crate_types.contains(&"dylib") && pkg.package_id() != cx.resolve.root()); if prefer_dynamic { - cmd = cmd.arg("-C").arg("prefer-dynamic"); + cmd.arg("-C").arg("prefer-dynamic"); } if profile.opt_level() != 0 { - cmd = cmd.arg("-C").arg(format!("opt-level={}", profile.opt_level())); + cmd.arg("-C").arg(&format!("opt-level={}", profile.opt_level())); } if (target.is_bin() || target.is_staticlib()) && profile.lto() { - cmd = cmd.args(&["-C", "lto"]); + cmd.args(&["-C", "lto"]); } else { // There are some restrictions with LTO and codegen-units, so we // only add codegen units when LTO is not used. 
match profile.codegen_units() { - Some(n) => cmd = cmd.arg("-C").arg(format!("codegen-units={}", n)), + Some(n) => { cmd.arg("-C").arg(&format!("codegen-units={}", n)); } None => {}, } } if profile.debug() { - cmd = cmd.arg("-g"); + cmd.arg("-g"); } else { - cmd = cmd.args(&["--cfg", "ndebug"]); + cmd.args(&["--cfg", "ndebug"]); } if profile.is_test() && profile.uses_test_harness() { - cmd = cmd.arg("--test"); + cmd.arg("--test"); } match cx.resolve.features(pkg.package_id()) { Some(features) => { for feat in features.iter() { - cmd = cmd.arg("--cfg").arg(format!("feature=\"{}\"", feat)); + cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } } None => {} @@ -644,62 +619,58 @@ fn build_base_args(cx: &Context, match metadata { Some(m) => { - cmd = cmd.arg("-C").arg(format!("metadata={}", m.metadata)); - cmd = cmd.arg("-C").arg(format!("extra-filename={}", m.extra_filename)); + cmd.arg("-C").arg(&format!("metadata={}", m.metadata)); + cmd.arg("-C").arg(&format!("extra-filename={}", m.extra_filename)); } None => {} } if profile.rpath() { - cmd = cmd.arg("-C").arg("rpath"); + cmd.arg("-C").arg("rpath"); } - - return cmd; } -fn build_plugin_args(mut cmd: CommandPrototype, cx: &Context, pkg: &Package, - target: &Target, kind: Kind) -> CommandPrototype { - cmd = cmd.arg("--out-dir"); - cmd = cmd.arg(cx.out_dir(pkg, kind, target)); - - cmd = cmd.arg("--emit=dep-info,link"); +fn build_plugin_args(cmd: &mut CommandPrototype, cx: &Context, pkg: &Package, + target: &Target, kind: Kind) { + cmd.arg("--out-dir").arg(&cx.out_dir(pkg, kind, target)); + cmd.arg("--emit=dep-info,link"); if kind == Kind::Target { - fn opt(cmd: CommandPrototype, key: &str, prefix: &str, - val: Option<&str>) -> CommandPrototype { - match val { - Some(val) => { - cmd.arg(key) - .arg(format!("{}{}", prefix, val)) - } - None => cmd + fn opt(cmd: &mut CommandPrototype, key: &str, prefix: &str, + val: Option<&str>) { + if let Some(val) = val { + cmd.arg(key).arg(&format!("{}{}", prefix, val)); } } - cmd = opt(cmd, "--target", "", cx.requested_target()); - cmd = opt(cmd, "-C", "ar=", cx.ar(kind)); - cmd = opt(cmd, "-C", "linker=", cx.linker(kind)); + opt(cmd, "--target", "", cx.requested_target()); + opt(cmd, "-C", "ar=", cx.ar(kind)); + opt(cmd, "-C", "linker=", cx.linker(kind)); } - - return cmd; } -fn build_deps_args(mut cmd: CommandPrototype, target: &Target, package: &Package, - cx: &Context, - kind: Kind) -> CargoResult { +fn build_deps_args(cmd: &mut CommandPrototype, target: &Target, + package: &Package, cx: &Context, kind: Kind) + -> CargoResult<()> { let layout = cx.layout(package, kind); - cmd = cmd.arg("-L").arg(format!("dependency={}", layout.root().display())); - cmd = cmd.arg("-L").arg(format!("dependency={}", layout.deps().display())); - - cmd = cmd.env("OUT_DIR", if package.has_custom_build() { - Some(layout.build_out(package)) - } else { - None + cmd.arg("-L").arg(&{ + let mut root = OsString::from_str("dependency="); + root.push_os_str(layout.root().as_os_str()); + root + }); + cmd.arg("-L").arg(&{ + let mut deps = OsString::from_str("dependency="); + deps.push_os_str(layout.deps().as_os_str()); + deps }); + if package.has_custom_build() { + cmd.env("OUT_DIR", &layout.build_out(package)); + } + for &(pkg, target) in cx.dep_targets(package, target).iter() { - cmd = try!(link_to(cmd, pkg, target, cx, kind)); + try!(link_to(cmd, pkg, target, cx, kind)); } let targets = package.targets().iter().filter(|target| { @@ -709,14 +680,14 @@ fn build_deps_args(mut cmd: CommandPrototype, target: &Target, package: &Package 
if (target.is_bin() || target.is_example()) && !target.profile().is_custom_build() { for target in targets.filter(|f| f.is_rlib() || f.is_dylib()) { - cmd = try!(link_to(cmd, package, target, cx, kind)); + try!(link_to(cmd, package, target, cx, kind)); } } - return Ok(cmd); + return Ok(()); - fn link_to(mut cmd: CommandPrototype, pkg: &Package, target: &Target, - cx: &Context, kind: Kind) -> CargoResult { + fn link_to(cmd: &mut CommandPrototype, pkg: &Package, target: &Target, + cx: &Context, kind: Kind) -> CargoResult<()> { // If this target is itself a plugin *or* if it's being linked to a // plugin, then we want the plugin directory. Otherwise we want the // target directory (hence the || here). @@ -727,16 +698,17 @@ fn build_deps_args(mut cmd: CommandPrototype, target: &Target, package: &Package }); for filename in try!(cx.target_filenames(target)).iter() { - if filename.as_bytes().ends_with(b".a") { continue } - let mut v = Vec::new(); - v.push_all(target.name().as_bytes()); - v.push(b'='); - v.push_all(layout.root().as_vec()); - v.push(old_path::SEP_BYTE); - v.push_all(filename.as_bytes()); - cmd = cmd.arg("--extern").arg(&v); + if filename.ends_with(".a") { continue } + let mut v = OsString::new(); + v.push_os_str(OsStr::from_str(target.name())); + v.push_os_str(OsStr::from_str("=")); + v.push_os_str(layout.root().as_os_str()); + let s = path::MAIN_SEPARATOR.to_string(); + v.push_os_str(OsStr::from_str(&s)); + v.push_os_str(OsStr::from_str(&filename)); + cmd.arg("--extern").arg(&v); } - return Ok(cmd); + Ok(()) } } @@ -745,14 +717,15 @@ pub fn process(cmd: CommandType, pkg: &Package, _target: &Target, // When invoking a tool, we need the *host* deps directory in the dynamic // library search path for plugins and such which have dynamic dependencies. let layout = cx.layout(pkg, Kind::Host); - let mut search_path = DynamicLibrary::search_path(); - search_path.push(layout.deps().clone()); + let mut search_path = util::dylib_path(); + search_path.push(layout.deps().to_path_buf()); // We want to use the same environment and such as normal processes, but we // want to override the dylib search path with the one we just calculated. 
let search_path = try!(join_paths(&search_path, DynamicLibrary::envvar())); - Ok(try!(cx.compilation.process(cmd, pkg)) - .env(DynamicLibrary::envvar(), Some(&search_path))) + let mut cmd = try!(cx.compilation.process(cmd, pkg)); + cmd.env(DynamicLibrary::envvar(), &search_path); + Ok(cmd) } fn each_dep<'a, F>(pkg: &Package, cx: &'a Context, mut f: F) diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs index 2c4ed04ac..d4faec1d8 100644 --- a/src/cargo/ops/cargo_test.rs +++ b/src/cargo/ops/cargo_test.rs @@ -1,3 +1,5 @@ +use std::ffi::{OsStr, OsString, AsOsStr}; +use std::path::Path; use core::Source; use sources::PathSource; @@ -14,7 +16,7 @@ pub fn run_tests(manifest_path: &Path, options: &TestOptions, test_args: &[String]) -> CargoResult> { let config = options.compile_opts.config; - let mut source = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut source = try!(PathSource::for_path(&manifest_path.parent().unwrap(), config)); try!(source.update()); @@ -29,12 +31,12 @@ pub fn run_tests(manifest_path: &Path, let cwd = config.cwd(); for &(_, ref exe) in tests_to_run { - let to_display = match exe.path_relative_from(&cwd) { + let to_display = match exe.relative_from(&cwd) { Some(path) => path, - None => exe.clone(), + None => &**exe, }; - let cmd = try!(compile.target_process(exe, &compile.package)) - .args(test_args); + let mut cmd = try!(compile.target_process(exe, &compile.package)); + cmd.args(test_args); try!(config.shell().concise(|shell| { shell.status("Running", to_display.display().to_string()) })); @@ -60,28 +62,27 @@ pub fn run_tests(manifest_path: &Path, for (lib, name) in libs { try!(config.shell().status("Doc-tests", name)); - let mut p = try!(compile.rustdoc_process(&compile.package)) - .arg("--test").arg(lib) - .arg("--crate-name").arg(name) - .arg("-L").arg(&compile.root_output) - .arg("-L").arg(&compile.deps_output) - .cwd(compile.package.root()); + let mut p = try!(compile.rustdoc_process(&compile.package)); + p.arg("--test").arg(lib) + .arg("--crate-name").arg(name) + .arg("-L").arg(&compile.root_output) + .arg("-L").arg(&compile.deps_output) + .cwd(compile.package.root()); - // FIXME(rust-lang/rust#16272): this should just always be passed. 
if test_args.len() > 0 { - p = p.arg("--test-args").arg(test_args.connect(" ")); + p.arg("--test-args").arg(&test_args.connect(" ")); } for feat in compile.features.iter() { - p = p.arg("--cfg").arg(format!("feature=\"{}\"", feat)); + p.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } for (pkg, libs) in compile.libraries.iter() { for lib in libs.iter() { - let mut arg = pkg.name().as_bytes().to_vec(); - arg.push(b'='); - arg.push_all(lib.as_vec()); - p = p.arg("--extern").arg(arg); + let mut arg = OsString::from_str(pkg.name()); + arg.push_os_str(OsStr::from_str("=")); + arg.push_os_str(lib.as_os_str()); + p.arg("--extern").arg(&arg); } } diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs index da7b5ef66..12e08c071 100644 --- a/src/cargo/ops/lockfile.rs +++ b/src/cargo/ops/lockfile.rs @@ -1,4 +1,6 @@ -use std::old_io::File; +use std::fs::File; +use std::io::prelude::*; +use std::path::Path; use rustc_serialize::{Encodable, Decodable}; use toml::{self, Encoder, Value}; @@ -8,7 +10,7 @@ use util::{CargoResult, ChainError, human}; use util::toml as cargo_toml; pub fn load_pkg_lockfile(pkg: &Package) -> CargoResult> { - let lockfile = pkg.manifest_path().dir_path().join("Cargo.lock"); + let lockfile = pkg.root().join("Cargo.lock"); let source_id = pkg.package_id().source_id(); load_lockfile(&lockfile, source_id).chain_error(|| { human(format!("failed to parse lock file at: {}", lockfile.display())) @@ -22,7 +24,8 @@ pub fn load_lockfile(path: &Path, sid: &SourceId) -> CargoResult Err(_) => return Ok(None) }; - let s = try!(f.read_to_string()); + let mut s = String::new(); + try!(f.read_to_string(&mut s)); let table = toml::Value::Table(try!(cargo_toml::parse(&s, path))); let mut d = toml::Decoder::new(table); @@ -65,7 +68,7 @@ pub fn write_lockfile(dst: &Path, resolve: &Resolve) -> CargoResult<()> { None => {} } - try!(File::create(dst).write_str(&out)); + try!(try!(File::create(dst)).write_all(out.as_bytes())); Ok(()) } diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index b62e5b204..8e8adfba1 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -1,8 +1,9 @@ use std::collections::HashMap; use std::env; +use std::fs::File; +use std::io::prelude::*; use std::iter::repeat; -use std::old_io::File; -use std::old_io::fs::PathExtensions; +use std::path::{Path, PathBuf}; use curl::http; use git2; @@ -30,7 +31,7 @@ pub fn publish(manifest_path: &Path, token: Option, index: Option, verify: bool) -> CargoResult<()> { - let mut src = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), config)); try!(src.update()); let pkg = try!(src.root_package()); @@ -96,9 +97,13 @@ fn transmit(pkg: &Package, tarball: &Path, registry: &mut Registry) let readme = match *readme { Some(ref readme) => { let path = pkg.root().join(readme); - Some(try!(File::open(&path).read_to_string().chain_error(|| { + let mut contents = String::new(); + try!(File::open(&path).and_then(|mut f| { + f.read_to_string(&mut contents) + }).chain_error(|| { human("failed to read the specified README") - }))) + })); + Some(contents) } None => None, }; @@ -224,7 +229,7 @@ pub fn http_timeout(config: &Config) -> CargoResult> { pub fn registry_login(config: &Config, token: String) -> CargoResult<()> { let RegistryConfig { index, token: _ } = try!(registry_configuration(config)); let mut map = HashMap::new(); - let p = config.cwd().clone(); + let p = config.cwd().to_path_buf(); match index { Some(index) => { 
map.insert("index".to_string(), ConfigValue::String(index, p.clone())); @@ -234,7 +239,7 @@ pub fn registry_login(config: &Config, token: String) -> CargoResult<()> { map.insert("token".to_string(), ConfigValue::String(token, p)); config::set_config(config, Location::Global, "registry", - ConfigValue::Table(map, Path::new("."))) + ConfigValue::Table(map, PathBuf::new("."))) } pub struct OwnersOptions { @@ -251,7 +256,7 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { Some(ref name) => name.clone(), None => { let manifest_path = try!(find_root_manifest_for_cwd(None)); - let mut src = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), config)); try!(src.update()); let pkg = try!(src.root_package()); @@ -314,7 +319,7 @@ pub fn yank(config: &Config, Some(name) => name, None => { let manifest_path = try!(find_root_manifest_for_cwd(None)); - let mut src = try!(PathSource::for_path(&manifest_path.dir_path(), + let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), config)); try!(src.update()); let pkg = try!(src.root_package()); diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs index 7b9e9c27c..3b4f866b6 100644 --- a/src/cargo/sources/git/source.rs +++ b/src/cargo/sources/git/source.rs @@ -1,6 +1,8 @@ use std::fmt::{self, Debug, Formatter}; use std::hash::{Hash, Hasher, SipHasher}; use std::mem; +use std::path::PathBuf; + use url::{self, Url}; use core::source::{Source, SourceId}; @@ -15,8 +17,8 @@ use sources::git::utils::{GitRemote, GitRevision}; pub struct GitSource<'a, 'b:'a> { remote: GitRemote, reference: GitReference, - db_path: Path, - checkout_path: Path, + db_path: PathBuf, + checkout_path: PathBuf, source_id: SourceId, path_source: Option>, rev: Option, @@ -44,8 +46,8 @@ impl<'a, 'b> GitSource<'a, 'b> { GitReference::Rev(ref s) => s.to_string(), }; let checkout_path = config.git_checkout_path() - .join(ident) - .join(reference_path); + .join(&ident) + .join(&reference_path); let reference = match source_id.precise() { Some(s) => GitReference::Rev(s.to_string()), diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs index 9861ed6e7..d467cbe21 100644 --- a/src/cargo/sources/git/utils.rs +++ b/src/cargo/sources/git/utils.rs @@ -1,6 +1,8 @@ -use std::fmt::{self, Formatter}; -use std::old_io::{USER_DIR}; -use std::old_io::fs::{mkdir_recursive, rmdir_recursive, PathExtensions}; +use std::fmt; +use std::path::{Path, PathBuf}; +use std::fs; +use std::io::prelude::*; + use rustc_serialize::{Encodable, Encoder}; use url::Url; use git2::{self, ObjectType}; @@ -13,7 +15,7 @@ use util::{CargoResult, ChainError, human, ToUrl, internal}; pub struct GitRevision(git2::Oid); impl fmt::Display for GitRevision { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.0, f) } } @@ -42,7 +44,7 @@ impl Encodable for GitRemote { /// GitCheckouts can be cloned from this GitDatabase. pub struct GitDatabase { remote: GitRemote, - path: Path, + path: PathBuf, repo: git2::Repository, } @@ -66,7 +68,7 @@ impl Encodable for GitDatabase { /// and return a CargoError if no revision for that reference was found. 
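The git source/utils hunks below follow the same ownership pattern as the rest of the diff: structs store `PathBuf`, and borrowed `&Path` arguments are promoted with `to_path_buf()` rather than `clone()`. A minimal, hedged illustration; the struct name is invented:

```
use std::path::{Path, PathBuf};

// Owned storage is PathBuf; a borrowed &Path is promoted explicitly.
struct CheckoutLocation {
    location: PathBuf,
}

impl CheckoutLocation {
    fn new(path: &Path) -> CheckoutLocation {
        CheckoutLocation { location: path.to_path_buf() }
    }
}
```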
pub struct GitCheckout<'a> { database: &'a GitDatabase, - location: Path, + location: PathBuf, revision: GitRevision, repo: git2::Repository, } @@ -123,14 +125,18 @@ impl GitRemote { } }; - Ok(GitDatabase { remote: self.clone(), path: into.clone(), repo: repo }) + Ok(GitDatabase { + remote: self.clone(), + path: into.to_path_buf(), + repo: repo, + }) } pub fn db_at(&self, db_path: &Path) -> CargoResult { let repo = try!(git2::Repository::open(db_path)); Ok(GitDatabase { remote: self.clone(), - path: db_path.clone(), + path: db_path.to_path_buf(), repo: repo, }) } @@ -145,9 +151,9 @@ impl GitRemote { fn clone_into(&self, dst: &Path) -> CargoResult { let url = self.url.to_string(); if dst.exists() { - try!(rmdir_recursive(dst)); + try!(fs::remove_dir_all(dst)); } - try!(mkdir_recursive(dst, USER_DIR)); + try!(fs::create_dir_all(dst)); let repo = try!(git2::Repository::init_bare(dst)); try!(fetch(&repo, &url, "refs/heads/*:refs/heads/*")); Ok(repo) @@ -222,7 +228,7 @@ impl<'a> GitCheckout<'a> { -> GitCheckout<'a> { GitCheckout { - location: path.clone(), + location: path.to_path_buf(), database: database, revision: revision, repo: repo, @@ -240,14 +246,14 @@ impl<'a> GitCheckout<'a> { } fn clone_repo(source: &Path, into: &Path) -> CargoResult { - let dirname = into.dir_path(); + let dirname = into.parent().unwrap(); - try!(mkdir_recursive(&dirname, USER_DIR).chain_error(|| { + try!(fs::create_dir_all(&dirname).chain_error(|| { human(format!("Couldn't mkdir {}", dirname.display())) })); if into.exists() { - try!(rmdir_recursive(into).chain_error(|| { + try!(fs::remove_dir_all(into).chain_error(|| { human(format!("Couldn't rmdir {}", into.display())) })); } @@ -288,7 +294,7 @@ impl<'a> GitCheckout<'a> { return update_submodules(&self.repo); fn update_submodules(repo: &git2::Repository) -> CargoResult<()> { - info!("update submodules for: {}", repo.path().display()); + info!("update submodules for: {:?}", repo.workdir().unwrap()); for mut child in try!(repo.submodules()).into_iter() { try!(child.init(false)); @@ -319,7 +325,7 @@ impl<'a> GitCheckout<'a> { repo } Err(..) 
=> { - let path = repo.path().dir_path().join(child.path()); + let path = repo.workdir().unwrap().join(child.path()); try!(git2::Repository::clone(url, &path)) } }; diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index 054c603d4..26a673d15 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -1,16 +1,19 @@ use std::cmp; use std::fmt::{self, Debug, Formatter}; -use std::old_io::fs::{self, PathExtensions}; +use std::fs; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; + use glob::Pattern; use git2; use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry}; use ops; -use util::{CargoResult, internal, internal_error, human, ChainError, Config}; +use util::{self, CargoResult, internal, internal_error, human, ChainError, Config}; pub struct PathSource<'a, 'b: 'a> { id: SourceId, - path: Path, + path: PathBuf, updated: bool, packages: Vec, config: &'a Config<'b>, @@ -34,7 +37,7 @@ impl<'a, 'b> PathSource<'a, 'b> { PathSource { id: id.clone(), - path: path.clone(), + path: path.to_path_buf(), updated: false, packages: Vec::new(), config: config, @@ -48,7 +51,7 @@ impl<'a, 'b> PathSource<'a, 'b> { return Err(internal("source has not been updated")) } - match self.packages.iter().find(|p| p.root() == self.path) { + match self.packages.iter().find(|p| p.root() == &*self.path) { Some(pkg) => Ok(pkg.clone()), None => Err(internal("no package found in source")) } @@ -71,8 +74,8 @@ impl<'a, 'b> PathSource<'a, 'b> { /// The basic assumption of this method is that all files in the directory /// are relevant for building this package, but it also contains logic to /// use other methods like .gitignore to filter the list of files. - pub fn list_files(&self, pkg: &Package) -> CargoResult> { - let root = pkg.manifest_path().dir_path(); + pub fn list_files(&self, pkg: &Package) -> CargoResult> { + let root = pkg.root(); let parse = |&: p: &String| { Pattern::new(p).map_err(|e| { @@ -85,7 +88,7 @@ impl<'a, 'b> PathSource<'a, 'b> { .map(|p| parse(p)).collect::, _>>()); let mut filter = |p: &Path| { - let relative_path = p.path_relative_from(&root).unwrap(); + let relative_path = p.relative_from(&root).unwrap(); include.iter().any(|p| p.matches_path(&relative_path)) || { include.len() == 0 && !exclude.iter().any(|p| p.matches_path(&relative_path)) @@ -103,7 +106,7 @@ impl<'a, 'b> PathSource<'a, 'b> { // us there most of the time!. let repo = self.packages.iter() .map(|pkg| pkg.root()) - .filter(|path| path.is_ancestor_of(&root)) + .filter(|path| root.starts_with(path)) .filter_map(|path| git2::Repository::open(&path).ok()) .next(); match repo { @@ -114,7 +117,7 @@ impl<'a, 'b> PathSource<'a, 'b> { fn list_files_git(&self, pkg: &Package, repo: git2::Repository, filter: &mut F) - -> CargoResult> + -> CargoResult> where F: FnMut(&Path) -> bool { warn!("list_files_git {}", pkg.package_id()); @@ -123,15 +126,20 @@ impl<'a, 'b> PathSource<'a, 'b> { Some(dir) => dir, None => return Err(internal_error("Can't list files on a bare repository.", "")), }; - let pkg_path = pkg.manifest_path().dir_path(); + + // Right now there is a bug such that "/a/b".relative_from("/a/") + // returns `None` so here we chop of the trailing slash if there is one. + // It is unclear to me whether this is actually a bug with paths or not. 
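The workaround referenced in the comment above lands just below as `util::lose_the_slash`. Its body is not part of this diff; one plausible, purely illustrative sketch of what such a helper could look like:

```
use std::path::Path;

// Drop a single trailing separator so that relative_from() sees "/a"
// rather than "/a/"; falls back to the input on non-UTF-8 paths.
fn lose_the_slash(path: &Path) -> &Path {
    match path.to_str() {
        Some(s) if s.len() > 1 && s.ends_with('/') =>
            Path::new(&s[..s.len() - 1]),
        _ => path,
    }
}
```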
+ let root = util::lose_the_slash(&root); + let pkg_path = pkg.root(); let mut ret = Vec::new(); 'outer: for entry in index.iter() { let fname = &entry.path[..]; - let file_path = root.join(fname); + let file_path = try!(join(&root, fname)); // Filter out files outside this package. - if !pkg_path.is_ancestor_of(&file_path) { continue } + if !file_path.starts_with(pkg_path) { continue } // Filter out Cargo.lock and target always if fname == b"Cargo.lock" { continue } @@ -139,9 +147,9 @@ impl<'a, 'b> PathSource<'a, 'b> { // Filter out sub-packages of this package for other_pkg in self.packages.iter().filter(|p| *p != pkg) { - let other_path = other_pkg.manifest_path().dir_path(); - if pkg_path.is_ancestor_of(&other_path) && - other_path.is_ancestor_of(&file_path) { + let other_path = other_pkg.root(); + if other_path.starts_with(pkg_path) && + file_path.starts_with(other_path) { continue 'outer; } } @@ -150,8 +158,8 @@ impl<'a, 'b> PathSource<'a, 'b> { // of just calling stat() again if file_path.is_dir() { warn!(" found submodule {}", file_path.display()); - let rel = file_path.path_relative_from(&root).unwrap(); - let rel = try!(rel.as_str().chain_error(|| { + let rel = file_path.relative_from(&root).unwrap(); + let rel = try!(rel.to_str().chain_error(|| { human(format!("invalid utf-8 filename: {}", rel.display())) })); let submodule = try!(repo.find_submodule(rel)); @@ -167,40 +175,57 @@ impl<'a, 'b> PathSource<'a, 'b> { ret.push(file_path); } } - Ok(ret) + return Ok(ret); + + #[cfg(unix)] + fn join(path: &Path, data: &[u8]) -> CargoResult { + use std::os::unix::prelude::*; + use std::ffi::OsStr; + Ok(path.join(::from_bytes(data))) + } + #[cfg(windows)] + fn join(path: &Path, data: &[u8]) -> CargoResult { + use std::str; + match str::from_utf8(data) { + Ok(s) => Ok(path.join(s)), + Err(..) => Err(internal("cannot process path in git with a non \ + unicode filename")), + } + } } fn list_files_walk(&self, pkg: &Package, mut filter: F) - -> CargoResult> + -> CargoResult> where F: FnMut(&Path) -> bool { let mut ret = Vec::new(); for pkg in self.packages.iter().filter(|p| *p == pkg) { - let loc = pkg.manifest_path().dir_path(); - try!(walk(&loc, &mut ret, true, &mut filter)); + let loc = pkg.manifest_path().parent().unwrap(); + try!(walk(loc, &mut ret, true, &mut filter)); } return Ok(ret); - fn walk(path: &Path, ret: &mut Vec, + fn walk(path: &Path, ret: &mut Vec, is_root: bool, filter: &mut F) -> CargoResult<()> where F: FnMut(&Path) -> bool { if !path.is_dir() { if (*filter)(path) { - ret.push(path.clone()); + ret.push(path.to_path_buf()); } return Ok(()) } // Don't recurse into any sub-packages that we have if !is_root && path.join("Cargo.toml").exists() { return Ok(()) } - for dir in try!(fs::readdir(path)).iter() { - match (is_root, dir.filename_str()) { + for dir in try!(fs::read_dir(path)) { + let dir = try!(dir).path(); + match (is_root, dir.file_name().and_then(|s| s.to_str())) { (_, Some(".git")) | (true, Some("target")) | (true, Some("Cargo.lock")) => continue, _ => {} } - try!(walk(dir, ret, false, filter)); + try!(walk(&dir, ret, false, filter)); } return Ok(()) } @@ -259,8 +284,9 @@ impl<'a, 'b> Source for PathSource<'a, 'b> { // condition where this path was rm'ed - either way, // we can ignore the error and treat the path's mtime // as 0. 
- warn!("{} {}", file.stat().map(|s| s.modified).unwrap_or(0), file.display()); - max = cmp::max(max, file.stat().map(|s| s.modified).unwrap_or(0)); + let mtime = file.metadata().map(|s| s.modified()).unwrap_or(0); + warn!("{} {}", mtime, file.display()); + max = cmp::max(max, mtime); } trace!("fingerprint {}: {}", self.path.display(), max); Ok(max.to_string()) diff --git a/src/cargo/sources/registry.rs b/src/cargo/sources/registry.rs index 2a26e1a6f..c28f9f269 100644 --- a/src/cargo/sources/registry.rs +++ b/src/cargo/sources/registry.rs @@ -158,12 +158,13 @@ //! ... //! ``` -use std::old_io::{self, fs, File}; -use std::old_io::fs::PathExtensions; use std::collections::HashMap; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::PathBuf; use curl::http; -use flate2::reader::GzDecoder; +use flate2::read::GzDecoder; use git2; use rustc_serialize::hex::ToHex; use rustc_serialize::json; @@ -181,9 +182,9 @@ static DEFAULT: &'static str = "https://github.com/rust-lang/crates.io-index"; pub struct RegistrySource<'a, 'b:'a> { source_id: SourceId, - checkout_path: Path, - cache_path: Path, - src_path: Path, + checkout_path: PathBuf, + cache_path: PathBuf, + src_path: PathBuf, config: &'a Config<'b>, handle: Option, sources: Vec>, @@ -265,7 +266,8 @@ impl<'a, 'b> RegistrySource<'a, 'b> { /// This requires that the index has been at least checked out. pub fn config(&self) -> CargoResult { let mut f = try!(File::open(&self.checkout_path.join("config.json"))); - let contents = try!(f.read_to_string()); + let mut contents = String::new(); + try!(f.read_to_string(&mut contents)); let config = try!(json::decode(&contents)); Ok(config) } @@ -281,8 +283,8 @@ impl<'a, 'b> RegistrySource<'a, 'b> { Err(..) => {} } - try!(fs::mkdir_recursive(&self.checkout_path, old_io::USER_DIR)); - let _ = fs::rmdir_recursive(&self.checkout_path); + try!(fs::create_dir_all(&self.checkout_path)); + let _ = fs::remove_dir_all(&self.checkout_path); let repo = try!(git2::Repository::init(&self.checkout_path)); Ok(repo) } @@ -295,14 +297,14 @@ impl<'a, 'b> RegistrySource<'a, 'b> { /// /// No action is taken if the package is already downloaded. fn download_package(&mut self, pkg: &PackageId, url: &Url) - -> CargoResult { + -> CargoResult { // TODO: should discover from the S3 redirect let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); - let dst = self.cache_path.join(filename); + let dst = self.cache_path.join(&filename); if dst.exists() { return Ok(dst) } try!(self.config.shell().status("Downloading", pkg)); - try!(fs::mkdir_recursive(&dst.dir_path(), old_io::USER_DIR)); + try!(fs::create_dir_all(dst.parent().unwrap())); let handle = match self.handle { Some(ref mut handle) => handle, None => { @@ -333,7 +335,7 @@ impl<'a, 'b> RegistrySource<'a, 'b> { pkg))) } - try!(File::create(&dst).write_all(resp.get_body())); + try!(try!(File::create(&dst)).write_all(resp.get_body())); Ok(dst) } @@ -341,17 +343,17 @@ impl<'a, 'b> RegistrySource<'a, 'b> { /// compiled. /// /// No action is taken if the source looks like it's already unpacked. 
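`unpack_package`, just below, now drives flate2's `read::GzDecoder` and the tar `Archive` over `std::fs` types. A hedged standalone sketch of that unpack step, written against current flate2/tar APIs, which differ slightly from the 2015-era versions used in this diff:

```
use std::fs::{self, File};
use std::io;
use std::path::Path;

use flate2::read::GzDecoder;
use tar::Archive;

// Decompress a downloaded .crate (a gzipped tarball) into dst.
fn unpack_crate(tarball: &Path, dst: &Path) -> io::Result<()> {
    fs::create_dir_all(dst)?;
    let gz = GzDecoder::new(File::open(tarball)?);
    let mut archive = Archive::new(gz);
    archive.unpack(dst)?;
    Ok(())
}
```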
- fn unpack_package(&self, pkg: &PackageId, tarball: Path) - -> CargoResult { - let dst = self.src_path.join(format!("{}-{}", pkg.name(), - pkg.version())); + fn unpack_package(&self, pkg: &PackageId, tarball: PathBuf) + -> CargoResult { + let dst = self.src_path.join(&format!("{}-{}", pkg.name(), + pkg.version())); if dst.join(".cargo-ok").exists() { return Ok(dst) } - try!(fs::mkdir_recursive(&dst.dir_path(), old_io::USER_DIR)); + try!(fs::create_dir_all(dst.parent().unwrap())); let f = try!(File::open(&tarball)); let gz = try!(GzDecoder::new(f)); let mut tar = Archive::new(gz); - try!(tar.unpack(&dst.dir_path())); + try!(tar.unpack(dst.parent().unwrap())); try!(File::create(&dst.join(".cargo-ok"))); Ok(dst) } @@ -365,16 +367,17 @@ impl<'a, 'b> RegistrySource<'a, 'b> { let path = self.checkout_path.clone(); let fs_name = name.chars().map(|c| c.to_lowercase()).collect::(); let path = match fs_name.len() { - 1 => path.join("1").join(fs_name), - 2 => path.join("2").join(fs_name), - 3 => path.join("3").join(&fs_name[..1]).join(fs_name), + 1 => path.join("1").join(&fs_name), + 2 => path.join("2").join(&fs_name), + 3 => path.join("3").join(&fs_name[..1]).join(&fs_name), _ => path.join(&fs_name[0..2]) .join(&fs_name[2..4]) - .join(fs_name), + .join(&fs_name), }; let summaries = match File::open(&path) { Ok(mut f) => { - let contents = try!(f.read_to_string()); + let mut contents = String::new(); + try!(f.read_to_string(&mut contents)); let ret: CargoResult>; ret = contents.lines().filter(|l| l.trim().len() > 0) .map(|l| self.parse_registry_package(l)) diff --git a/src/cargo/util/config.rs b/src/cargo/util/config.rs index 076e68d60..ad8f359d3 100644 --- a/src/cargo/util/config.rs +++ b/src/cargo/util/config.rs @@ -3,9 +3,10 @@ use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::hash_map::{HashMap}; use std::env; use std::fmt; +use std::fs::{self, File}; +use std::io::prelude::*; use std::mem; -use std::old_io::fs::{self, PathExtensions, File}; -use std::old_io; +use std::path::{Path, PathBuf}; use rustc_serialize::{Encodable,Encoder}; use toml; @@ -18,14 +19,14 @@ use util::toml as cargo_toml; use self::ConfigValue as CV; pub struct Config<'a> { - home_path: Path, + home_path: PathBuf, shell: RefCell<&'a mut MultiShell>, rustc_version: String, /// The current host and default target of rustc rustc_host: String, values: RefCell>, values_loaded: Cell, - cwd: Path, + cwd: PathBuf, } impl<'a> Config<'a> { @@ -51,23 +52,23 @@ impl<'a> Config<'a> { pub fn home(&self) -> &Path { &self.home_path } - pub fn git_db_path(&self) -> Path { + pub fn git_db_path(&self) -> PathBuf { self.home_path.join("git").join("db") } - pub fn git_checkout_path(&self) -> Path { + pub fn git_checkout_path(&self) -> PathBuf { self.home_path.join("git").join("checkouts") } - pub fn registry_index_path(&self) -> Path { + pub fn registry_index_path(&self) -> PathBuf { self.home_path.join("registry").join("index") } - pub fn registry_cache_path(&self) -> Path { + pub fn registry_cache_path(&self) -> PathBuf { self.home_path.join("registry").join("cache") } - pub fn registry_source_path(&self) -> Path { + pub fn registry_source_path(&self) -> PathBuf { self.home_path.join("registry").join("src") } @@ -123,7 +124,7 @@ impl<'a> Config<'a> { Ok(Some(val.clone())) } - pub fn get_string(&self, key: &str) -> CargoResult> { + pub fn get_string(&self, key: &str) -> CargoResult> { match try!(self.get(key)) { Some(CV::String(i, path)) => Ok(Some((i, path))), Some(val) => self.expected("string", key, val), @@ 
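The `summaries` lookup above encodes the crates.io index layout: one- and two-character crate names live under `1/` and `2/`, three-character names under `3/<first letter>/`, and everything else under `<first two>/<next two>/<name>`. A standalone sketch of that mapping, assuming the name has already been lower-cased:

    use std::path::{Path, PathBuf};

    fn index_file(root: &Path, name: &str) -> PathBuf {
        match name.len() {
            1 => root.join("1").join(name),
            2 => root.join("2").join(name),
            3 => root.join("3").join(&name[..1]).join(name),
            _ => root.join(&name[0..2]).join(&name[2..4]).join(name),
        }
    }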
-132,7 +133,7 @@ impl<'a> Config<'a> { } pub fn get_table(&self, key: &str) - -> CargoResult, Path)>> { + -> CargoResult, PathBuf)>> { match try!(self.get(key)) { Some(CV::Table(i, path)) => Ok(Some((i, path))), Some(val) => self.expected("table", key, val), @@ -140,7 +141,7 @@ impl<'a> Config<'a> { } } - pub fn get_i64(&self, key: &str) -> CargoResult> { + pub fn get_i64(&self, key: &str) -> CargoResult> { match try!(self.get(key)) { Some(CV::Integer(i, path)) => Ok(Some((i, path))), Some(val) => self.expected("integer", key, val), @@ -155,11 +156,11 @@ impl<'a> Config<'a> { } fn load_values(&self) -> CargoResult<()> { - let mut cfg = CV::Table(HashMap::new(), Path::new(".")); + let mut cfg = CV::Table(HashMap::new(), PathBuf::new(".")); - try!(walk_tree(&self.cwd, |mut file| { - let path = file.path().clone(); - let contents = try!(file.read_to_string()); + try!(walk_tree(&self.cwd, |mut file, path| { + let mut contents = String::new(); + try!(file.read_to_string(&mut contents)); let table = try!(cargo_toml::parse(&contents, &path).chain_error(|| { human(format!("could not parse TOML configuration in `{}`", path.display())) @@ -190,11 +191,11 @@ pub enum Location { #[derive(Eq,PartialEq,Clone,RustcDecodable)] pub enum ConfigValue { - Integer(i64, Path), - String(String, Path), - List(Vec<(String, Path)>, Path), - Table(HashMap, Path), - Boolean(bool, Path), + Integer(i64, PathBuf), + String(String, PathBuf), + List(Vec<(String, PathBuf)>, PathBuf), + Table(HashMap, PathBuf), + Boolean(bool, PathBuf), } impl fmt::Debug for ConfigValue { @@ -237,17 +238,17 @@ impl Encodable for ConfigValue { impl ConfigValue { fn from_toml(path: &Path, toml: toml::Value) -> CargoResult { match toml { - toml::Value::String(val) => Ok(CV::String(val, path.clone())), - toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.clone())), - toml::Value::Integer(i) => Ok(CV::Integer(i, path.clone())), + toml::Value::String(val) => Ok(CV::String(val, path.to_path_buf())), + toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())), + toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())), toml::Value::Array(val) => { Ok(CV::List(try!(val.into_iter().map(|toml| { match toml { - toml::Value::String(val) => Ok((val, path.clone())), + toml::Value::String(val) => Ok((val, path.to_path_buf())), v => Err(human(format!("expected string but found {} \ in list", v.type_str()))), } - }).collect::>()), path.clone())) + }).collect::>()), path.to_path_buf())) } toml::Value::Table(val) => { Ok(CV::Table(try!(val.into_iter().map(|(key, value)| { @@ -255,7 +256,7 @@ impl ConfigValue { human(format!("failed to parse key `{}`", key)) })); Ok((key, value)) - }).collect::>()), path.clone())) + }).collect::>()), path.to_path_buf())) } v => return Err(human(format!("found TOML configuration value of \ unknown type `{}`", v.type_str()))) @@ -276,7 +277,7 @@ impl ConfigValue { for (key, value) in new.into_iter() { match old.entry(key.clone()) { Occupied(mut entry) => { - let path = value.definition_path().clone(); + let path = value.definition_path().to_path_buf(); let entry = entry.get_mut(); try!(entry.merge(value).chain_error(|| { human(format!("failed to merge key `{}` between \ @@ -323,7 +324,7 @@ impl ConfigValue { } } - pub fn list(&self) -> CargoResult<&[(String, Path)]> { + pub fn list(&self) -> CargoResult<&[(String, PathBuf)]> { match *self { CV::List(ref list, _) => Ok(list), _ => self.expected("list"), @@ -379,25 +380,28 @@ impl ConfigValue { } } -fn homedir() -> Option { - let cargo_home = 
env::var("CARGO_HOME").map(|p| Path::new(p)).ok(); +fn homedir() -> Option { + let cargo_home = env::var_os("CARGO_HOME").map(|p| PathBuf::new(&p)); let user_home = env::home_dir().map(|p| p.join(".cargo")); return cargo_home.or(user_home); } fn walk_tree(pwd: &Path, mut walk: F) -> CargoResult<()> - where F: FnMut(File) -> CargoResult<()> + where F: FnMut(File, &Path) -> CargoResult<()> { - let mut current = pwd.clone(); + let mut current = pwd; loop { let possible = current.join(".cargo").join("config"); if possible.exists() { let file = try!(File::open(&possible)); - try!(walk(file)); + try!(walk(file, &possible)); + } + match current.parent() { + Some(p) => current = p, + None => break, } - if !current.pop() { break; } } // Once we're done, also be sure to walk the home directory even if it's not @@ -407,11 +411,11 @@ fn walk_tree(pwd: &Path, mut walk: F) -> CargoResult<()> human("Cargo couldn't find your home directory. \ This probably means that $HOME was not set.") })); - if !home.is_ancestor_of(pwd) { + if !pwd.starts_with(&home) { let config = home.join("config"); if config.exists() { let file = try!(File::open(&config)); - try!(walk(file)); + try!(walk(file, &config)); } } @@ -429,8 +433,9 @@ pub fn set_config(cfg: &Config, loc: Location, key: &str, Location::Global => cfg.home_path.join("config"), Location::Project => unimplemented!(), }; - try!(fs::mkdir_recursive(&file.dir_path(), old_io::USER_DIR)); - let contents = File::open(&file).read_to_string().unwrap_or("".to_string()); + try!(fs::create_dir_all(file.parent().unwrap())); + let mut contents = String::new(); + let _ = File::open(&file).and_then(|mut f| f.read_to_string(&mut contents)); let mut toml = try!(cargo_toml::parse(&contents, &file)); toml.insert(key.to_string(), value.into_toml()); let mut out = try!(File::create(&file)); diff --git a/src/cargo/util/errors.rs b/src/cargo/util/errors.rs index 712c1a360..45ccc20df 100644 --- a/src/cargo/util/errors.rs +++ b/src/cargo/util/errors.rs @@ -1,8 +1,9 @@ use std::error::{FromError, Error}; use std::ffi; use std::fmt; +use std::io; use std::old_io::IoError; -use std::old_io::process::{ProcessOutput, ProcessExit, ExitStatus, ExitSignal}; +use std::process::{Output, ExitStatus}; use std::str; use semver; @@ -111,9 +112,9 @@ impl CargoError for ChainedError { pub struct ProcessError { pub desc: String, - pub exit: Option, - pub output: Option, - cause: Option, + pub exit: Option, + pub output: Option, + cause: Option, } impl Error for ProcessError { @@ -255,6 +256,7 @@ macro_rules! from_error { from_error! 
{ semver::ReqParseError, IoError, + io::Error, ProcessError, git2::Error, json::DecoderError, @@ -272,6 +274,7 @@ impl FromError> for Box { impl CargoError for semver::ReqParseError {} impl CargoError for IoError {} +impl CargoError for io::Error {} impl CargoError for git2::Error {} impl CargoError for json::DecoderError {} impl CargoError for curl::ErrCode {} @@ -287,24 +290,24 @@ impl CargoError for ffi::NulError {} // Construction helpers pub fn process_error(msg: &str, - cause: Option, - status: Option<&ProcessExit>, - output: Option<&ProcessOutput>) -> ProcessError { + cause: Option, + status: Option<&ExitStatus>, + output: Option<&Output>) -> ProcessError { let exit = match status { - Some(&ExitStatus(i)) | Some(&ExitSignal(i)) => i.to_string(), + Some(s) => s.to_string(), None => "never executed".to_string(), }; - let mut desc = format!("{} (status={})", &msg, exit); + let mut desc = format!("{} ({})", &msg, exit); if let Some(out) = output { - match str::from_utf8(&out.output) { + match str::from_utf8(&out.stdout) { Ok(s) if s.trim().len() > 0 => { desc.push_str("\n--- stdout\n"); desc.push_str(s); } Ok(..) | Err(..) => {} } - match str::from_utf8(&out.error) { + match str::from_utf8(&out.stderr) { Ok(s) if s.trim().len() > 0 => { desc.push_str("\n--- stderr\n"); desc.push_str(s); diff --git a/src/cargo/util/hex.rs b/src/cargo/util/hex.rs index f3479666d..7530c5472 100644 --- a/src/cargo/util/hex.rs +++ b/src/cargo/util/hex.rs @@ -3,9 +3,16 @@ use std::hash::{Hasher, Hash, SipHasher}; use rustc_serialize::hex::ToHex; pub fn to_hex(num: u64) -> String { - let mut writer = Vec::with_capacity(8); - writer.write_le_u64(num).unwrap(); // this should never fail - writer.to_hex() + [ + (num >> 0) as u8, + (num >> 8) as u8, + (num >> 16) as u8, + (num >> 24) as u8, + (num >> 32) as u8, + (num >> 40) as u8, + (num >> 48) as u8, + (num >> 56) as u8, + ].to_hex() } pub fn short_hash(hashable: &H) -> String { diff --git a/src/cargo/util/important_paths.rs b/src/cargo/util/important_paths.rs index 7cc74d9d5..7facb0980 100644 --- a/src/cargo/util/important_paths.rs +++ b/src/cargo/util/important_paths.rs @@ -1,10 +1,11 @@ use std::env; -use std::old_io::fs::PathExtensions; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; use util::{CargoResult, human, ChainError}; /// Iteratively search for `file` in `pwd` and its parents, returning /// the path of the directory. -pub fn find_project(pwd: &Path, file: &str) -> CargoResult { +pub fn find_project(pwd: &Path, file: &str) -> CargoResult { find_project_manifest(pwd, file).map(|mut p| { // remove the file, leaving just the directory p.pop(); @@ -14,8 +15,8 @@ pub fn find_project(pwd: &Path, file: &str) -> CargoResult { /// Iteratively search for `file` in `pwd` and its parents, returning /// the path to the file. 
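The rewritten `to_hex` above spells out the eight little-endian bytes by hand because the old `write_le_u64` writer method is gone. On current Rust the same bytes come from `u64::to_le_bytes`; the sketch below also formats the hex itself rather than relying on the `rustc-serialize` `ToHex` trait used in the diff:

    fn to_hex(num: u64) -> String {
        num.to_le_bytes()
            .iter()
            .map(|byte| format!("{:02x}", byte))
            .collect()
    }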
-pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult { - let mut current = pwd.clone(); +pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult { + let mut current = pwd; loop { let manifest = current.join(file); @@ -23,7 +24,10 @@ pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult { return Ok(manifest) } - if !current.pop() { break; } + match current.parent() { + Some(p) => current = p, + None => break, + } } Err(human(format!("Could not find `{}` in `{}` or any parent directory", @@ -32,18 +36,18 @@ pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult { /// Find the root Cargo.toml pub fn find_root_manifest_for_cwd(manifest_path: Option) - -> CargoResult { + -> CargoResult { let cwd = try!(env::current_dir().chain_error(|| { human("Couldn't determine the current working directory") })); match manifest_path { - Some(path) => Ok(cwd.join(path)), + Some(path) => Ok(cwd.join(&path)), None => find_project_manifest(&cwd, "Cargo.toml"), } } /// Return the path to the `file` in `pwd`, if it exists. -pub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult { +pub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult { let manifest = pwd.join(file); if manifest.exists() { diff --git a/src/cargo/util/mod.rs b/src/cargo/util/mod.rs index cd19462ac..14f16393f 100644 --- a/src/cargo/util/mod.rs +++ b/src/cargo/util/mod.rs @@ -4,7 +4,8 @@ pub use self::errors::{CargoResult, CargoError, ChainError, CliResult}; pub use self::errors::{CliError, ProcessError}; pub use self::errors::{process_error, internal_error, internal, human}; pub use self::errors::{Human, caused_human}; -pub use self::paths::{realpath, join_paths}; +pub use self::paths::{join_paths, path2bytes, bytes2path, dylib_path}; +pub use self::paths::{normalize_path, lose_the_slash}; pub use self::lev_distance::{lev_distance}; pub use self::hex::{to_hex, short_hash}; pub use self::dependency_queue::{DependencyQueue, Fresh, Dirty, Freshness}; diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs index f02fefe10..9f736f381 100644 --- a/src/cargo/util/paths.rs +++ b/src/cargo/util/paths.rs @@ -1,54 +1,15 @@ use std::env; -use std::old_io::fs; -use std::old_io; -use std::old_path::BytesContainer; -use std::os; +use std::dynamic_lib::DynamicLibrary; +use std::ffi::{AsOsStr, OsString}; +use std::path::{Path, PathBuf, Component}; use util::{human, internal, CargoResult, ChainError}; -pub fn realpath(original: &Path) -> old_io::IoResult { - const MAX_LINKS_FOLLOWED: usize = 256; - let cwd = try!(env::current_dir()); - let original = cwd.join(original); - - // Right now lstat on windows doesn't work quite well - if cfg!(windows) { - return Ok(original) - } - - let result = original.root_path(); - let mut result = result.expect("make_absolute has no root_path"); - let mut followed = 0; - - for part in original.components() { - result.push(part); - - loop { - if followed == MAX_LINKS_FOLLOWED { - return Err(old_io::standard_error(old_io::InvalidInput)) - } - - match fs::lstat(&result) { - Err(..) => break, - Ok(ref stat) if stat.kind != old_io::FileType::Symlink => break, - Ok(..) 
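`find_project_manifest` above swaps the old `Path::pop` loop for `Path::parent`, walking from the working directory up to the filesystem root until the manifest shows up. A reduced sketch of that ancestor search, with error handling simplified to `Option`:

    use std::path::{Path, PathBuf};

    fn find_manifest(start: &Path, file: &str) -> Option<PathBuf> {
        let mut current = Some(start);
        while let Some(dir) = current {
            let candidate = dir.join(file);
            if candidate.exists() {
                return Some(candidate);
            }
            current = dir.parent();
        }
        None
    }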
=> { - followed += 1; - let path = try!(fs::readlink(&result)); - result.pop(); - result.push(path); - } - } - } - } - - return Ok(result); -} - -#[allow(deprecated)] // need an OsStr-based Command first -pub fn join_paths(paths: &[T], env: &str) - -> CargoResult> { - os::join_paths(paths).or_else(|e| { - let paths = paths.iter().map(|p| Path::new(p)).collect::>(); +pub fn join_paths(paths: &[T], env: &str) -> CargoResult { + env::join_paths(paths.iter()).or_else(|e| { + let paths = paths.iter().map(|p| { + Path::new(p.as_os_str()) + }).collect::>(); internal(format!("failed to join path array: {:?}", paths)).chain_error(|| { human(format!("failed to join search paths together: {}\n\ Does ${} have an unterminated quote character?", @@ -56,3 +17,70 @@ pub fn join_paths(paths: &[T], env: &str) }) }) } + +pub fn dylib_path() -> Vec { + match env::var_os(DynamicLibrary::envvar()) { + Some(var) => env::split_paths(&var).collect(), + None => Vec::new(), + } +} + +pub fn normalize_path(path: &Path) -> PathBuf { + let mut components = path.components(); + let mut ret = if let Some(c @ Component::Prefix { .. }) = components.peek() { + components.next(); + PathBuf::new(c.as_os_str()) + } else { + PathBuf::new("") + }; + + for component in components { + match component { + Component::Prefix { .. } => unreachable!(), + Component::Empty => { ret.push(""); } + Component::RootDir => { ret.push(component.as_os_str()); } + Component::CurDir => {} + Component::ParentDir => { ret.pop(); } + Component::Normal(c) => { ret.push(c); } + } + } + return ret; +} + +/// Chop off the trailing slash of a path +pub fn lose_the_slash(path: &Path) -> &Path { + let mut components = path.components(); + match components.next_back() { + Some(Component::CurDir) => components.as_path(), + _ => path, + } +} + +#[cfg(unix)] +pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { + use std::os::unix::prelude::*; + Ok(path.as_os_str().as_bytes()) +} +#[cfg(windows)] +pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { + match path.as_os_str().to_str() { + Some(s) => Ok(s.as_bytes()), + None => Err(human(format!("invalid non-unicode path: {}", + path.display()))) + } +} + +#[cfg(unix)] +pub fn bytes2path(bytes: &[u8]) -> CargoResult { + use std::os::unix::prelude::*; + use std::ffi::OsStr; + Ok(PathBuf::new(::from_bytes(bytes))) +} +#[cfg(windows)] +pub fn bytes2path(bytes: &[u8]) -> CargoResult { + use std::str; + match str::from_utf8(bytes) { + Ok(s) => Ok(PathBuf::new(s)), + Err(..) 
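`normalize_path` above folds `.` and `..` components out of a path purely lexically, without touching the filesystem. The sketch below is adapted to the current `Component` enum, which uses a tuple-style `Prefix(..)` variant and no longer has the `Component::Empty` case seen in the diff:

    use std::path::{Component, Path, PathBuf};

    fn normalize(path: &Path) -> PathBuf {
        let mut out = PathBuf::new();
        for component in path.components() {
            match component {
                Component::Prefix(p) => out.push(p.as_os_str()),
                Component::RootDir => out.push(component.as_os_str()),
                Component::CurDir => {}
                Component::ParentDir => { out.pop(); }
                Component::Normal(c) => out.push(c),
            }
        }
        out
    }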
=> Err(human("invalid non-unicode path")), + } +} diff --git a/src/cargo/util/process_builder.rs b/src/cargo/util/process_builder.rs index f6fe4d2aa..025ebcb30 100644 --- a/src/cargo/util/process_builder.rs +++ b/src/cargo/util/process_builder.rs @@ -1,26 +1,26 @@ use std::collections::HashMap; use std::env; -use std::ffi::CString; -use std::fmt::{self, Formatter}; -use std::old_io::process::{Command, ProcessOutput, InheritFd}; -use std::old_path::BytesContainer; +use std::ffi::{OsString, AsOsStr}; +use std::fmt; +use std::path::Path; +use std::process::{Command, Output}; use util::{CargoResult, ProcessError, process_error}; #[derive(Clone, PartialEq, Debug)] pub struct ProcessBuilder { - program: CString, - args: Vec, - env: HashMap>, - cwd: Path, + program: OsString, + args: Vec, + env: HashMap>, + cwd: OsString, } impl fmt::Display for ProcessBuilder { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - try!(write!(f, "`{}", String::from_utf8_lossy(self.program.as_bytes()))); + try!(write!(f, "`{}", self.program.to_string_lossy())); for arg in self.args.iter() { - try!(write!(f, " {}", String::from_utf8_lossy(arg.as_bytes()))); + try!(write!(f, " {}", arg.to_string_lossy())); } write!(f, "`") @@ -28,41 +28,48 @@ impl fmt::Display for ProcessBuilder { } impl ProcessBuilder { - pub fn arg(mut self, arg: T) -> ProcessBuilder { - self.args.push(CString::new(arg.container_as_bytes()).unwrap()); + pub fn arg(&mut self, arg: &T) -> &mut ProcessBuilder { + self.args.push(arg.as_os_str().to_os_string()); self } - pub fn args(mut self, arguments: &[T]) -> ProcessBuilder { + pub fn args(&mut self, arguments: &[T]) -> &mut ProcessBuilder { self.args.extend(arguments.iter().map(|t| { - CString::new(t.container_as_bytes()).unwrap() + t.as_os_str().to_os_string() })); self } - pub fn get_args(&self) -> &[CString] { + pub fn cwd(&mut self, path: &T) -> &mut ProcessBuilder { + self.cwd = path.as_os_str().to_os_string(); + self + } + + pub fn env(&mut self, key: &str, + val: &T) -> &mut ProcessBuilder { + self.env.insert(key.to_string(), Some(val.as_os_str().to_os_string())); + self + } + + pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder { + self.env.insert(key.to_string(), None); + self + } + + pub fn get_args(&self) -> &[OsString] { &self.args } + pub fn get_cwd(&self) -> &Path { Path::new(&self.cwd) } - pub fn cwd(mut self, path: Path) -> ProcessBuilder { - self.cwd = path; - self + pub fn get_env(&self, var: &str) -> Option { + self.env.get(var).cloned().or_else(|| Some(env::var_os(var))) + .and_then(|s| s) } - pub fn env(mut self, key: &str, - val: Option) -> ProcessBuilder { - let val = val.map(|t| CString::new(t.container_as_bytes()).unwrap()); - self.env.insert(key.to_string(), val); - self - } + pub fn get_envs(&self) -> &HashMap> { &self.env } - // TODO: should InheritFd be hardcoded? 
pub fn exec(&self) -> Result<(), ProcessError> { let mut command = self.build_command(); - command.stdout(InheritFd(1)) - .stderr(InheritFd(2)) - .stdin(InheritFd(0)); - let exit = try!(command.status().map_err(|e| { process_error(&format!("Could not execute process `{}`", self.debug_string()), @@ -78,8 +85,8 @@ impl ProcessBuilder { } } - pub fn exec_with_output(&self) -> Result { - let command = self.build_command(); + pub fn exec_with_output(&self) -> Result { + let mut command = self.build_command(); let output = try!(command.output().map_err(|e| { process_error(&format!("Could not execute process `{}`", @@ -98,7 +105,7 @@ impl ProcessBuilder { pub fn build_command(&self) -> Command { let mut command = Command::new(&self.program); - command.cwd(&self.cwd); + command.current_dir(&self.cwd); for arg in self.args.iter() { command.arg(arg); } @@ -112,20 +119,20 @@ impl ProcessBuilder { } fn debug_string(&self) -> String { - let mut program = format!("{}", String::from_utf8_lossy(self.program.as_bytes())); + let mut program = format!("{}", self.program.to_string_lossy()); for arg in self.args.iter() { program.push(' '); - program.push_str(&format!("{}", String::from_utf8_lossy(arg.as_bytes()))); + program.push_str(&format!("{}", arg.to_string_lossy())); } program } } -pub fn process(cmd: T) -> CargoResult { +pub fn process(cmd: &T) -> CargoResult { Ok(ProcessBuilder { - program: CString::new(cmd.container_as_bytes()).unwrap(), + program: cmd.as_os_str().to_os_string(), args: Vec::new(), - cwd: try!(env::current_dir()), + cwd: try!(env::current_dir()).as_os_str().to_os_string(), env: HashMap::new(), }) } diff --git a/src/cargo/util/to_url.rs b/src/cargo/util/to_url.rs index 840d71165..2e3365cb2 100644 --- a/src/cargo/util/to_url.rs +++ b/src/cargo/util/to_url.rs @@ -1,4 +1,5 @@ use url::{self, Url, UrlParser}; +use std::path::Path; pub trait ToUrl { fn to_url(self) -> Result; diff --git a/src/cargo/util/toml.rs b/src/cargo/util/toml.rs index a55b471fa..80a80e776 100644 --- a/src/cargo/util/toml.rs +++ b/src/cargo/util/toml.rs @@ -1,10 +1,12 @@ use std::collections::HashMap; - +use std::default::Default; use std::fmt; -use std::old_io::fs::{self, PathExtensions}; +use std::fs; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; use std::slice; use std::str; -use std::default::Default; + use toml; use semver; use rustc_serialize::{Decodable, Decoder}; @@ -22,18 +24,18 @@ use util::{CargoResult, human, ToUrl, ToSemver, ChainError, Config}; #[derive(Clone)] pub struct Layout { - pub root: Path, - lib: Option, - bins: Vec, - examples: Vec, - tests: Vec, - benches: Vec, + pub root: PathBuf, + lib: Option, + bins: Vec, + examples: Vec, + tests: Vec, + benches: Vec, } impl Layout { - fn main(&self) -> Option<&Path> { + fn main(&self) -> Option<&PathBuf> { self.bins.iter().find(|p| { - match p.filename_str() { + match p.file_name().and_then(|s| s.to_str()) { Some(s) => s == "main.rs", None => false } @@ -41,16 +43,19 @@ impl Layout { } } -fn try_add_file(files: &mut Vec, root: &Path, dir: &str) { - let p = root.join(dir); - if p.exists() { - files.push(p); +fn try_add_file(files: &mut Vec, file: PathBuf) { + if file.exists() { + files.push(file); } } -fn try_add_files(files: &mut Vec, root: &Path, dir: &str) { - match fs::readdir(&root.join(dir)) { +fn try_add_files(files: &mut Vec, root: PathBuf) { + match fs::read_dir(&root) { Ok(new) => { - files.extend(new.into_iter().filter(|f| f.extension_str() == Some("rs"))) + files.extend(new.filter_map(|dir| { + dir.map(|d| d.path()).ok() + 
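`try_add_files` above moves from `fs::readdir` to `fs::read_dir`, whose iterator yields `io::Result<DirEntry>` values that must be unwrapped before the extension check. A compact sketch of collecting the `.rs` files in a directory, treating a missing directory as empty just as the diff does:

    use std::fs;
    use std::path::{Path, PathBuf};

    fn rust_files(dir: &Path) -> Vec<PathBuf> {
        match fs::read_dir(dir) {
            Ok(entries) => entries
                .filter_map(|entry| entry.ok().map(|e| e.path()))
                .filter(|p| p.extension().and_then(|s| s.to_str()) == Some("rs"))
                .collect(),
            Err(_) => Vec::new(),
        }
    }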
}).filter(|f| { + f.extension().and_then(|s| s.to_str()) == Some("rs") + })) } Err(_) => {/* just don't add anything if the directory doesn't exist, etc. */} } @@ -66,20 +71,21 @@ pub fn project_layout(root_path: &Path) -> Layout { let mut tests = vec!(); let mut benches = vec!(); - if root_path.join("src/lib.rs").exists() { - lib = Some(root_path.join("src/lib.rs")); + let lib_canidate = root_path.join("src").join("lib.rs"); + if lib_canidate.exists() { + lib = Some(lib_canidate); } - try_add_file(&mut bins, root_path, "src/main.rs"); - try_add_files(&mut bins, root_path, "src/bin"); + try_add_file(&mut bins, root_path.join("src").join("main.rs")); + try_add_files(&mut bins, root_path.join("src").join("bin")); - try_add_files(&mut examples, root_path, "examples"); + try_add_files(&mut examples, root_path.join("examples")); - try_add_files(&mut tests, root_path, "tests"); - try_add_files(&mut benches, root_path, "benches"); + try_add_files(&mut tests, root_path.join("tests")); + try_add_files(&mut benches, root_path.join("benches")); Layout { - root: root_path.clone(), + root: root_path.to_path_buf(), lib: lib, bins: bins, examples: examples, @@ -92,11 +98,11 @@ pub fn to_manifest(contents: &[u8], source_id: &SourceId, layout: Layout, config: &Config) - -> CargoResult<(Manifest, Vec)> { + -> CargoResult<(Manifest, Vec)> { let manifest = layout.root.join("Cargo.toml"); - let manifest = match manifest.path_relative_from(config.cwd()) { - Some(path) => path, - None => manifest, + let manifest = match manifest.relative_from(config.cwd()) { + Some(path) => path.to_path_buf(), + None => manifest.clone(), }; let contents = try!(str::from_utf8(contents).chain_error(|| { human(format!("{} is not valid UTF-8", manifest.display())) @@ -292,7 +298,7 @@ impl TomlProject { struct Context<'a, 'b, 'c: 'b> { deps: &'a mut Vec, source_id: &'a SourceId, - nested_paths: &'a mut Vec, + nested_paths: &'a mut Vec, config: &'b Config<'c>, } @@ -314,11 +320,11 @@ fn inferred_lib_target(name: &str, layout: &Layout) -> Vec { fn inferred_bin_targets(name: &str, layout: &Layout) -> Vec { layout.bins.iter().filter_map(|bin| { - let name = if bin.as_vec() == b"src/main.rs" || - *bin == layout.root.join("src/main.rs") { + let name = if &**bin == Path::new("src/main.rs") || + *bin == layout.root.join("src").join("main.rs") { Some(name.to_string()) } else { - bin.filestem_str().map(|f| f.to_string()) + bin.file_stem().and_then(|s| s.to_str()).map(|f| f.to_string()) }; name.map(|name| { @@ -333,7 +339,7 @@ fn inferred_bin_targets(name: &str, layout: &Layout) -> Vec { fn inferred_example_targets(layout: &Layout) -> Vec { layout.examples.iter().filter_map(|ex| { - ex.filestem_str().map(|name| { + ex.file_stem().and_then(|s| s.to_str()).map(|name| { TomlTarget { name: name.to_string(), path: Some(PathValue::Path(ex.clone())), @@ -345,7 +351,7 @@ fn inferred_example_targets(layout: &Layout) -> Vec { fn inferred_test_targets(layout: &Layout) -> Vec { layout.tests.iter().filter_map(|ex| { - ex.filestem_str().map(|name| { + ex.file_stem().and_then(|s| s.to_str()).map(|name| { TomlTarget { name: name.to_string(), path: Some(PathValue::Path(ex.clone())), @@ -357,7 +363,7 @@ fn inferred_test_targets(layout: &Layout) -> Vec { fn inferred_bench_targets(layout: &Layout) -> Vec { layout.benches.iter().filter_map(|ex| { - ex.filestem_str().map(|name| { + ex.file_stem().and_then(|s| s.to_str()).map(|name| { TomlTarget { name: name.to_string(), path: Some(PathValue::Path(ex.clone())), @@ -370,7 +376,7 @@ fn inferred_bench_targets(layout: 
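The inferred-target helpers above all share one move: derive the target name from the file stem via `file_stem().and_then(|s| s.to_str())`, which replaces the old `filestem_str`. A tiny sketch of that inference over a list of candidate files (the struct and its fields are placeholders, not Cargo's `TomlTarget`):

    use std::path::PathBuf;

    struct Inferred { name: String, path: PathBuf }

    fn inferred_targets(files: &[PathBuf]) -> Vec<Inferred> {
        files.iter().filter_map(|file| {
            file.file_stem()
                .and_then(|s| s.to_str())
                .map(|name| Inferred { name: name.to_string(), path: file.clone() })
        }).collect()
    }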
&Layout) -> Vec { impl TomlManifest { pub fn to_manifest(&self, source_id: &SourceId, layout: &Layout, config: &Config) - -> CargoResult<(Manifest, Vec)> { + -> CargoResult<(Manifest, Vec)> { let mut nested_paths = vec!(); let project = self.project.as_ref().or_else(|| self.package.as_ref()); @@ -441,7 +447,7 @@ impl TomlManifest { }; // processing the custom build script - let new_build = project.build.clone().map(Path::new); + let new_build = project.build.as_ref().map(PathBuf::new); // Get targets let profiles = self.profile.clone().unwrap_or(Default::default()); @@ -558,7 +564,7 @@ fn process_dependencies(cx: &mut Context, } None => { details.path.as_ref().map(|path| { - cx.nested_paths.push(Path::new(path)); + cx.nested_paths.push(PathBuf::new(path)); cx.source_id.clone() }) } @@ -594,7 +600,7 @@ struct TomlTarget { #[derive(RustcDecodable, Clone)] enum PathValue { String(String), - Path(Path), + Path(PathBuf), } /// Corresponds to a `target` entry, but `TomlTarget` is already used. @@ -620,9 +626,9 @@ impl TomlTarget { } impl PathValue { - fn to_path(&self) -> Path { + fn to_path(&self) -> PathBuf { match *self { - PathValue::String(ref s) => Path::new(s), + PathValue::String(ref s) => PathBuf::new(s), PathValue::Path(ref p) => p.clone(), } } @@ -639,7 +645,7 @@ impl fmt::Debug for PathValue { fn normalize(libs: &[TomlLibTarget], bins: &[TomlBinTarget], - custom_build: Option, + custom_build: Option, examples: &[TomlExampleTarget], tests: &[TomlTestTarget], benches: &[TomlBenchTarget], @@ -716,7 +722,7 @@ fn normalize(libs: &[TomlLibTarget], dep: TestDep, metadata: &Metadata, profiles: &TomlProfiles) { let l = &libs[0]; let path = l.path.clone().unwrap_or_else(|| { - PathValue::String(format!("src/{}.rs", l.name)) + PathValue::Path(Path::new("src").join(&format!("{}.rs", l.name))) }); let crate_types = l.crate_type.clone().and_then(|kinds| { kinds.iter().map(|s| LibKind::from_str(s)) @@ -738,15 +744,13 @@ fn normalize(libs: &[TomlLibTarget], } } - fn bin_targets(dst: &mut Vec, bins: &[TomlBinTarget], - dep: TestDep, metadata: &Metadata, - profiles: &TomlProfiles, - mut default: F) - where F: FnMut(&TomlBinTarget) -> String - { + fn bin_targets(dst: &mut Vec, bins: &[TomlBinTarget], + dep: TestDep, metadata: &Metadata, + profiles: &TomlProfiles, + default: &mut FnMut(&TomlBinTarget) -> PathBuf) { for bin in bins.iter() { let path = bin.path.clone().unwrap_or_else(|| { - PathValue::String(default(bin)) + PathValue::Path(default(bin)) }); for profile in target_profiles(bin, profiles, dep).iter() { @@ -775,20 +779,21 @@ fn normalize(libs: &[TomlLibTarget], &profiles.dev), ]; - let name = format!("build-script-{}", cmd.filestem_str().unwrap_or("")); + let name = format!("build-script-{}", + cmd.file_stem().and_then(|s| s.to_str()).unwrap_or("")); for profile in profiles.iter() { dst.push(Target::custom_build_target(&name, cmd, profile, None)); } } - fn example_targets(dst: &mut Vec, examples: &[TomlExampleTarget], - profiles: &TomlProfiles, - mut default: F) - where F: FnMut(&TomlExampleTarget) -> String - { + fn example_targets(dst: &mut Vec, examples: &[TomlExampleTarget], + profiles: &TomlProfiles, + default: &mut FnMut(&TomlExampleTarget) -> PathBuf) { for ex in examples.iter() { - let path = ex.path.clone().unwrap_or_else(|| PathValue::String(default(ex))); + let path = ex.path.clone().unwrap_or_else(|| { + PathValue::Path(default(ex)) + }); let profile = merge(Profile::default_example(), &profiles.test); let profile_release = merge(Profile::default_release(), &profiles.release); @@ 
-801,14 +806,12 @@ fn normalize(libs: &[TomlLibTarget], } } - fn test_targets(dst: &mut Vec, tests: &[TomlTestTarget], - metadata: &Metadata, profiles: &TomlProfiles, - mut default: F) - where F: FnMut(&TomlTestTarget) -> String - { + fn test_targets(dst: &mut Vec, tests: &[TomlTestTarget], + metadata: &Metadata, profiles: &TomlProfiles, + default: &mut FnMut(&TomlTestTarget) -> PathBuf) { for test in tests.iter() { let path = test.path.clone().unwrap_or_else(|| { - PathValue::String(default(test)) + PathValue::Path(default(test)) }); let harness = test.harness.unwrap_or(true); @@ -825,14 +828,12 @@ fn normalize(libs: &[TomlLibTarget], } } - fn bench_targets(dst: &mut Vec, benches: &[TomlBenchTarget], - metadata: &Metadata, profiles: &TomlProfiles, - mut default: F) - where F: FnMut(&TomlBenchTarget) -> String - { + fn bench_targets(dst: &mut Vec, benches: &[TomlBenchTarget], + metadata: &Metadata, profiles: &TomlProfiles, + default: &mut FnMut(&TomlBenchTarget) -> PathBuf) { for bench in benches.iter() { let path = bench.path.clone().unwrap_or_else(|| { - PathValue::String(default(bench)) + PathValue::Path(default(bench)) }); let harness = bench.harness.unwrap_or(true); @@ -861,14 +862,16 @@ fn normalize(libs: &[TomlLibTarget], ([_, ..], [_, ..]) => { lib_targets(&mut ret, libs, TestDep::Needed, metadata, profiles); bin_targets(&mut ret, bins, test_dep, metadata, profiles, - |bin| format!("src/bin/{}.rs", bin.name)); + &mut |bin| Path::new("src").join("bin") + .join(&format!("{}.rs", bin.name))); }, ([_, ..], []) => { lib_targets(&mut ret, libs, TestDep::Needed, metadata, profiles); }, ([], [_, ..]) => { bin_targets(&mut ret, bins, test_dep, metadata, profiles, - |bin| format!("src/{}.rs", bin.name)); + &mut |bin| Path::new("src") + .join(&format!("{}.rs", bin.name))); }, ([], []) => () } @@ -878,23 +881,24 @@ fn normalize(libs: &[TomlLibTarget], } example_targets(&mut ret, examples, profiles, - |ex| format!("examples/{}.rs", ex.name)); + &mut |ex| Path::new("examples") + .join(&format!("{}.rs", ex.name))); - test_targets(&mut ret, tests, metadata, profiles, - |test| { - if test.name == "test" { - "src/test.rs".to_string() - } else { - format!("tests/{}.rs", test.name) - }}); + test_targets(&mut ret, tests, metadata, profiles, &mut |test| { + if test.name == "test" { + Path::new("src").join("test.rs") + } else { + Path::new("tests").join(&format!("{}.rs", test.name)) + } + }); - bench_targets(&mut ret, benches, metadata, profiles, - |bench| { - if bench.name == "bench" { - "src/bench.rs".to_string() - } else { - format!("benches/{}.rs", bench.name) - }}); + bench_targets(&mut ret, benches, metadata, profiles, &mut |bench| { + if bench.name == "bench" { + Path::new("src").join("bench.rs") + } else { + Path::new("benches").join(&format!("{}.rs", bench.name)) + } + }); ret } diff --git a/src/cargo/util/vcs.rs b/src/cargo/util/vcs.rs index 7847a75bb..d171ec5eb 100644 --- a/src/cargo/util/vcs.rs +++ b/src/cargo/util/vcs.rs @@ -1,4 +1,4 @@ -#![allow(missing_copy_implementations)] +use std::path::Path; use git2; @@ -19,12 +19,11 @@ impl GitRepo { impl HgRepo { pub fn init(path: &Path) -> CargoResult { - let path_str = path.as_str().unwrap(); - try!(try!(process("hg")).arg("init").arg(path_str).exec()); + try!(try!(process("hg")).arg("init").arg(path).exec()); return Ok(HgRepo) } pub fn discover(path: &Path) -> CargoResult { - try!(try!(process("hg")).arg("root").cwd(path.clone()).exec_with_output()); + try!(try!(process("hg")).arg("root").cwd(path).exec_with_output()); return Ok(HgRepo) } } diff 
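The `vcs.rs` hunk above can pass a `&Path` straight to `.arg()` and `.cwd()` now that process arguments are `OsString`-based, instead of round-tripping through `as_str().unwrap()`. Roughly the same two calls expressed directly on `std::process::Command`:

    use std::path::Path;
    use std::process::Command;

    fn hg_init(path: &Path) -> std::io::Result<()> {
        // `hg init <path>`; the path no longer has to be valid UTF-8.
        Command::new("hg").arg("init").arg(path).status()?;
        Ok(())
    }

    fn hg_root(path: &Path) -> std::io::Result<Vec<u8>> {
        // `hg root` run with the repository as the working directory.
        let out = Command::new("hg").arg("root").current_dir(path).output()?;
        Ok(out.stdout)
    }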
--git a/src/registry/Cargo.toml b/src/registry/Cargo.toml index 4c1512437..622f99ec4 100644 --- a/src/registry/Cargo.toml +++ b/src/registry/Cargo.toml @@ -8,5 +8,5 @@ name = "registry" path = "lib.rs" [dependencies] -curl = "0.1" -rustc-serialize = "0.2" +curl = "0.2" +rustc-serialize = "0.3" diff --git a/src/registry/lib.rs b/src/registry/lib.rs index 48ca595b1..d6022b127 100644 --- a/src/registry/lib.rs +++ b/src/registry/lib.rs @@ -1,12 +1,14 @@ -#![feature(core, old_io, old_path)] +#![feature(core, io, path, fs)] extern crate curl; extern crate "rustc-serialize" as rustc_serialize; -use std::fmt; -use std::old_io::{self, fs, MemReader, MemWriter, File}; use std::collections::HashMap; -use std::old_io::util::ChainedReader; +use std::fmt; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::io::{self, Cursor}; +use std::path::Path; use std::result; use curl::http; @@ -14,7 +16,6 @@ use curl::http::handle::Method::{Put, Get, Delete}; use curl::http::handle::{Method, Request}; use rustc_serialize::json; - pub struct Registry { host: String, token: Option, @@ -36,7 +37,7 @@ pub enum Error { Api(Vec), Unauthorized, TokenMissing, - Io(old_io::IoError), + Io(io::Error), } #[derive(RustcDecodable)] @@ -133,18 +134,27 @@ impl Registry { // (metadata for the package) // // - let stat = try!(fs::stat(tarball).map_err(Error::Io)); + let stat = try!(fs::metadata(tarball).map_err(Error::Io)); let header = { - let mut w = MemWriter::new(); - w.write_le_u32(json.len() as u32).unwrap(); - w.write_str(&json).unwrap(); - w.write_le_u32(stat.size as u32).unwrap(); - MemReader::new(w.into_inner()) + let mut w = Vec::new(); + w.extend([ + (json.len() >> 0) as u8, + (json.len() >> 8) as u8, + (json.len() >> 16) as u8, + (json.len() >> 24) as u8, + ].iter().cloned()); + w.extend(json.as_bytes().iter().cloned()); + w.extend([ + (stat.len() >> 0) as u8, + (stat.len() >> 8) as u8, + (stat.len() >> 16) as u8, + (stat.len() >> 24) as u8, + ].iter().cloned()); + w }; let tarball = try!(File::open(tarball).map_err(Error::Io)); - let size = stat.size as usize + header.get_ref().len(); - let mut body = ChainedReader::new(vec![Box::new(header) as Box, - Box::new(tarball) as Box].into_iter()); + let size = stat.len() as usize + header.len(); + let mut body = Cursor::new(header).chain(tarball); let url = format!("{}/api/v1/crates/new", self.host); diff --git a/src/rustversion.txt b/src/rustversion.txt index d47abca81..86e833e0e 100644 --- a/src/rustversion.txt +++ b/src/rustversion.txt @@ -1 +1 @@ -2015-02-21 +2015-02-27 diff --git a/src/snapshots.txt b/src/snapshots.txt index b95d4da6d..5b78f0a8c 100644 --- a/src/snapshots.txt +++ b/src/snapshots.txt @@ -1,3 +1,11 @@ +2015-02-26 + linux-i386 2a28b604d09b4a76a54a05d91f7f158692427b3a + linux-x86_64 7367f4aca86d38e209ef7236b00175df036c03e2 + macos-i386 e5cabb0a4a2b4e47f7b1ae9b802e2b5d0b14eac5 + macos-x86_64 3026c60ddd46d2bcf1cb178fc801095dbfba5286 + winnt-i386 2008eed3965ed9a989a38c22b9c55c02ae9db1f1 + winnt-x86_64 98a48d7a6dbffcd099ea2574a68f04883624d9a1 + 2015-01-24 linux-i386 96213038f850569f1c4fa6a0d146c6155c0d566b linux-x86_64 4d87486493c2881edced7b1d2f8beaac32aaa5b5 diff --git a/tests/support/git.rs b/tests/support/git.rs index 116819fcb..af231e9db 100644 --- a/tests/support/git.rs +++ b/tests/support/git.rs @@ -1,4 +1,6 @@ -use std::old_io::{self, fs, File}; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; use url::Url; use git2; @@ -7,14 +9,14 @@ use support::path2url; pub struct RepoBuilder { repo: git2::Repository, 
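The upload hunk above replaces `MemWriter`/`ChainedReader` with a plain `Vec<u8>` chained onto the tarball file. The framing is: a 4-byte little-endian JSON length, the JSON, a 4-byte little-endian tarball length, then the tarball bytes. A sketch of building that body on current Rust, where `to_le_bytes` replaces the hand-rolled shifts:

    use std::fs::File;
    use std::io::{Cursor, Read};
    use std::path::Path;

    fn publish_body(json: &str, tarball: &Path) -> std::io::Result<impl Read> {
        let file = File::open(tarball)?;
        let tarball_len = file.metadata()?.len();
        let mut header = Vec::new();
        header.extend_from_slice(&(json.len() as u32).to_le_bytes());
        header.extend_from_slice(json.as_bytes());
        header.extend_from_slice(&(tarball_len as u32).to_le_bytes());
        // The header is consumed first, then the tarball follows.
        Ok(Cursor::new(header).chain(file))
    }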
- files: Vec, + files: Vec, } pub fn repo(p: &Path) -> RepoBuilder { RepoBuilder::init(p) } impl RepoBuilder { pub fn init(p: &Path) -> RepoBuilder { - fs::mkdir_recursive(&p.dir_path(), old_io::USER_DIR).unwrap(); + fs::create_dir_all(p.parent().unwrap()).unwrap(); let repo = git2::Repository::init(p).unwrap(); { let mut config = repo.config().unwrap(); @@ -24,17 +26,16 @@ impl RepoBuilder { RepoBuilder { repo: repo, files: Vec::new() } } - pub fn file(self, path: &str, contents: T) -> RepoBuilder { + pub fn file(self, path: &str, contents: &str) -> RepoBuilder { let mut me = self.nocommit_file(path, contents); - me.files.push(Path::new(path)); + me.files.push(PathBuf::new(path)); me } - pub fn nocommit_file(self, path: &str, - contents: T) -> RepoBuilder { - let dst = self.repo.path().dir_path().join(path); - fs::mkdir_recursive(&dst.dir_path(), old_io::USER_DIR).unwrap(); - File::create(&dst).write_str(contents.as_slice()).unwrap(); + pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder { + let dst = self.repo.workdir().unwrap().join(path); + fs::create_dir_all(dst.parent().unwrap()).unwrap(); + File::create(&dst).unwrap().write_all(contents.as_bytes()).unwrap(); self } @@ -51,5 +52,7 @@ impl RepoBuilder { "Initial commit", &tree, &[]).unwrap(); } - pub fn url(&self) -> Url { path2url(self.repo.path().dir_path()) } + pub fn url(&self) -> Url { + path2url(self.repo.workdir().unwrap().to_path_buf()) + } } diff --git a/tests/support/mod.rs b/tests/support/mod.rs index de7972d89..fcb0b9595 100644 --- a/tests/support/mod.rs +++ b/tests/support/mod.rs @@ -1,10 +1,11 @@ -use std::error::Error; use std::env; +use std::error::Error; +use std::ffi::AsOsStr; use std::fmt; -use std::old_io::fs::{self, PathExtensions}; -use std::old_io::process::{ProcessOutput}; -use std::old_io; -use std::old_path::{Path, BytesContainer}; +use std::fs; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; +use std::process::Output; use std::str::{self, Str}; use url::Url; @@ -12,7 +13,7 @@ use hamcrest as ham; use cargo::util::{process,ProcessBuilder}; use cargo::util::ProcessError; -use support::paths::PathExt; +use support::paths::CargoPathExt; pub mod paths; pub mod git; @@ -26,12 +27,12 @@ pub mod registry; #[derive(PartialEq,Clone)] struct FileBuilder { - path: Path, + path: PathBuf, body: String } impl FileBuilder { - pub fn new(path: Path, body: &str) -> FileBuilder { + pub fn new(path: PathBuf, body: &str) -> FileBuilder { FileBuilder { path: path, body: body.to_string() } } @@ -43,50 +44,50 @@ impl FileBuilder { .with_err_msg(format!("Could not create file; path={}", self.path.display()))); - file.write_str(self.body.as_slice()) + file.write_all(self.body.as_bytes()) .with_err_msg(format!("Could not write to file; path={}", self.path.display())) } - fn dirname(&self) -> Path { - Path::new(self.path.dirname()) + fn dirname(&self) -> &Path { + self.path.parent().unwrap() } } #[derive(PartialEq,Clone)] struct SymlinkBuilder { - dst: Path, - src: Path + dst: PathBuf, + src: PathBuf, } impl SymlinkBuilder { - pub fn new(dst: Path, src: Path) -> SymlinkBuilder { + pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { SymlinkBuilder { dst: dst, src: src } } fn mk(&self) -> Result<(), String> { try!(mkdir_recursive(&self.dirname())); - fs::symlink(&self.dst, &self.src) + fs::soft_link(&self.dst, &self.src) .with_err_msg(format!("Could not create symlink; dst={} src={}", self.dst.display(), self.src.display())) } - fn dirname(&self) -> Path { - Path::new(self.src.dirname()) + fn 
dirname(&self) -> &Path { + self.src.parent().unwrap() } } #[derive(PartialEq,Clone)] pub struct ProjectBuilder { name: String, - root: Path, + root: PathBuf, files: Vec, symlinks: Vec } impl ProjectBuilder { - pub fn new(name: &str, root: Path) -> ProjectBuilder { + pub fn new(name: &str, root: PathBuf) -> ProjectBuilder { ProjectBuilder { name: name.to_string(), root: root, @@ -95,50 +96,55 @@ impl ProjectBuilder { } } - pub fn root(&self) -> Path { + pub fn root(&self) -> PathBuf { self.root.clone() } pub fn url(&self) -> Url { path2url(self.root()) } - pub fn bin(&self, b: &str) -> Path { - self.build_dir().join(format!("{}{}", b, env::consts::EXE_SUFFIX)) + pub fn bin(&self, b: &str) -> PathBuf { + self.build_dir().join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } - pub fn release_bin(&self, b: &str) -> Path { - self.build_dir().join("release").join(format!("{}{}", b, - env::consts::EXE_SUFFIX)) + pub fn release_bin(&self, b: &str) -> PathBuf { + self.build_dir().join("release").join(&format!("{}{}", b, + env::consts::EXE_SUFFIX)) } - pub fn target_bin(&self, target: &str, b: &str) -> Path { - self.build_dir().join(target).join(format!("{}{}", b, - env::consts::EXE_SUFFIX)) + pub fn target_bin(&self, target: &str, b: &str) -> PathBuf { + self.build_dir().join(target).join(&format!("{}{}", b, + env::consts::EXE_SUFFIX)) } - pub fn build_dir(&self) -> Path { + pub fn build_dir(&self) -> PathBuf { self.root.join("target") } - pub fn process(&self, program: T) -> ProcessBuilder { - process(program) - .unwrap() - .cwd(self.root()) - .env("HOME", Some(paths::home().display().to_string().as_slice())) + pub fn process(&self, program: &T) -> ProcessBuilder { + let mut p = process(program).unwrap(); + p.cwd(&self.root()).env("HOME", &paths::home()); + return p; + } + + pub fn cargo(&self, cmd: &str) -> ProcessBuilder { + let mut p = self.process(&cargo_dir().join("cargo")); + p.arg(cmd); + return p; } pub fn cargo_process(&self, cmd: &str) -> ProcessBuilder { self.build(); - self.process(cargo_dir().join("cargo")).arg(cmd) + self.cargo(cmd) } - pub fn file(mut self, path: B, - body: S) -> ProjectBuilder { - self.files.push(FileBuilder::new(self.root.join(path), body.as_slice())); + pub fn file(mut self, path: &B, + body: &str) -> ProjectBuilder { + self.files.push(FileBuilder::new(self.root.join(path), body)); self } - pub fn symlink(mut self, dst: T, - src: T) -> ProjectBuilder { + pub fn symlink(mut self, dst: &T, + src: &T) -> ProjectBuilder { self.symlinks.push(SymlinkBuilder::new(self.root.join(dst), self.root.join(src))); self @@ -187,7 +193,7 @@ pub fn project(name: &str) -> ProjectBuilder { // === Helpers === pub fn mkdir_recursive(path: &Path) -> Result<(), String> { - fs::mkdir_recursive(path, old_io::USER_DIR) + fs::create_dir_all(path) .with_err_msg(format!("could not create directory; path={}", path.display())) } @@ -226,12 +232,13 @@ impl ErrMsg for Result { } // Path to cargo executables -pub fn cargo_dir() -> Path { - env::var("CARGO_BIN_PATH").map(Path::new).ok() - .or_else(|| env::current_exe().ok().map(|s| s.dir_path())) - .unwrap_or_else(|| { - panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test") - }) +pub fn cargo_dir() -> PathBuf { + env::var_os("CARGO_BIN_PATH").map(|s| PathBuf::new(&s)).or_else(|| { + env::current_exe().ok().as_ref().and_then(|s| s.parent()) + .map(|s| s.to_path_buf()) + }).unwrap_or_else(|| { + panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test") + }) } /// Returns an absolute path in the filesystem that `path` points to. 
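`SymlinkBuilder` above switches to `fs::soft_link`, which has since been deprecated in favour of the per-platform functions. A hedged sketch of how the same helper looks against current std (the Windows variant assumes the target is a file; directories would need `symlink_dir`):

    use std::io;
    use std::path::Path;

    #[cfg(unix)]
    fn make_symlink(original: &Path, link: &Path) -> io::Result<()> {
        std::os::unix::fs::symlink(original, link)
    }

    #[cfg(windows)]
    fn make_symlink(original: &Path, link: &Path) -> io::Result<()> {
        std::os::windows::fs::symlink_file(original, link)
    }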
The @@ -267,34 +274,34 @@ impl Execs { self } - fn match_output(&self, actual: &ProcessOutput) -> ham::MatchResult { + fn match_output(&self, actual: &Output) -> ham::MatchResult { self.match_status(actual) .and(self.match_stdout(actual)) .and(self.match_stderr(actual)) } - fn match_status(&self, actual: &ProcessOutput) -> ham::MatchResult { + fn match_status(&self, actual: &Output) -> ham::MatchResult { match self.expect_exit_code { None => ham::success(), Some(code) => { ham::expect( - actual.status.matches_exit_status(code as isize), + actual.status.code() == Some(code), format!("exited with {}\n--- stdout\n{}\n--- stderr\n{}", actual.status, - String::from_utf8_lossy(actual.output.as_slice()), - String::from_utf8_lossy(actual.error.as_slice()))) + String::from_utf8_lossy(&actual.stdout), + String::from_utf8_lossy(&actual.stderr))) } } } - fn match_stdout(&self, actual: &ProcessOutput) -> ham::MatchResult { - self.match_std(self.expect_stdout.as_ref(), actual.output.as_slice(), - "stdout", actual.error.as_slice()) + fn match_stdout(&self, actual: &Output) -> ham::MatchResult { + self.match_std(self.expect_stdout.as_ref(), &actual.stdout, + "stdout", &actual.stderr) } - fn match_stderr(&self, actual: &ProcessOutput) -> ham::MatchResult { - self.match_std(self.expect_stderr.as_ref(), actual.error.as_slice(), - "stderr", actual.output.as_slice()) + fn match_stderr(&self, actual: &Output) -> ham::MatchResult { + self.match_std(self.expect_stderr.as_ref(), &actual.stderr, + "stderr", &actual.stdout) } fn match_std(&self, expected: Option<&String>, actual: &[u8], @@ -348,8 +355,8 @@ impl Execs { } fn lines_match(expected: &str, mut actual: &str) -> bool { - for part in expected.split_str("[..]") { - match actual.find_str(part) { + for part in expected.split("[..]") { + match actual.find(part) { Some(i) => actual = &actual[i + part.len()..], None => { return false @@ -391,7 +398,13 @@ impl fmt::Display for Execs { } impl ham::Matcher for Execs { - fn matches(&self, process: ProcessBuilder) -> ham::MatchResult { + fn matches(&self, mut process: ProcessBuilder) -> ham::MatchResult { + self.matches(&mut process) + } +} + +impl<'a> ham::Matcher<&'a mut ProcessBuilder> for Execs { + fn matches(&self, process: &'a mut ProcessBuilder) -> ham::MatchResult { let res = process.exec_with_output(); match res { @@ -485,8 +498,8 @@ pub fn basic_lib_manifest(name: &str) -> String { "#, name, name) } -pub fn path2url(p: Path) -> Url { - Url::from_file_path(&p).ok().unwrap() +pub fn path2url(p: PathBuf) -> Url { + Url::from_file_path(&*p).ok().unwrap() } pub static RUNNING: &'static str = " Running"; diff --git a/tests/support/paths.rs b/tests/support/paths.rs index 1a2d63764..8cbd24063 100644 --- a/tests/support/paths.rs +++ b/tests/support/paths.rs @@ -1,91 +1,77 @@ -use std::old_io::IoResult; use std::env; -use std::old_io::fs::{self, PathExtensions}; +use std::fs; +use std::io::prelude::*; +use std::io; +use std::path::{Path, PathBuf}; use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; -use std::old_io; - -use cargo::util::realpath; static CARGO_INTEGRATION_TEST_DIR : &'static str = "cit"; static NEXT_ID: AtomicUsize = ATOMIC_USIZE_INIT; thread_local!(static TASK_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst)); -pub fn root() -> Path { - let path = env::current_exe().unwrap() - .dir_path() +pub fn root() -> PathBuf { + env::current_exe().unwrap() + .parent().unwrap() .join(CARGO_INTEGRATION_TEST_DIR) - .join(TASK_ID.with(|my_id| format!("test-{}", my_id))); - realpath(&path).unwrap() + 
.join(&TASK_ID.with(|my_id| format!("test-{}", my_id))) } -pub fn home() -> Path { +pub fn home() -> PathBuf { root().join("home") } -pub trait PathExt { - fn rm_rf(&self) -> IoResult<()>; - fn mkdir_p(&self) -> IoResult<()>; - fn move_into_the_past(&self) -> IoResult<()>; +pub trait CargoPathExt { + fn rm_rf(&self) -> io::Result<()>; + fn mkdir_p(&self) -> io::Result<()>; + fn move_into_the_past(&self) -> io::Result<()>; } -impl PathExt for Path { +impl CargoPathExt for Path { /* Technically there is a potential race condition, but we don't * care all that much for our tests */ - fn rm_rf(&self) -> IoResult<()> { + fn rm_rf(&self) -> io::Result<()> { if self.exists() { - // On windows, apparently git checks out the database with objects - // set to the permission 444, and apparently you can't unlink a file - // with permissions 444 because you don't have write permissions. - // Whow knew! - // - // If the rmdir fails due to a permission denied error, then go back - // and change everything to have write permissions, then remove - // everything. - match fs::rmdir_recursive(self) { - Err(old_io::IoError { kind: old_io::PermissionDenied, .. }) => {} - e => return e, - } - for path in try!(fs::walk_dir(self)) { - try!(fs::chmod(&path, old_io::USER_RWX)); - } - fs::rmdir_recursive(self) + fs::remove_dir_all(self) } else { Ok(()) } } - fn mkdir_p(&self) -> IoResult<()> { - fs::mkdir_recursive(self, old_io::USER_DIR) + fn mkdir_p(&self) -> io::Result<()> { + fs::create_dir_all(self) } - fn move_into_the_past(&self) -> IoResult<()> { + fn move_into_the_past(&self) -> io::Result<()> { if self.is_file() { try!(time_travel(self)); } else { let target = self.join("target"); for f in try!(fs::walk_dir(self)) { - if target.is_ancestor_of(&f) { continue } + let f = try!(f).path(); + if f.starts_with(&target) { continue } if !f.is_file() { continue } try!(time_travel(&f)); } } return Ok(()); - fn time_travel(path: &Path) -> IoResult<()> { - let stat = try!(path.stat()); + fn time_travel(path: &Path) -> io::Result<()> { + let stat = try!(path.metadata()); let hour = 1000 * 3600; - let newtime = stat.modified - hour; + let newtime = stat.modified() - hour; - // Sadly change_file_times has the same failure mode as the above - // rmdir_recursive :( - match fs::change_file_times(path, newtime, newtime) { - Err(old_io::IoError { kind: old_io::PermissionDenied, .. }) => {} + // Sadly change_file_times has a failure mode where a readonly file + // cannot have its times changed on windows. 
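`move_into_the_past` above keeps the old workaround for read-only files (git checks objects out with mode 0444): if the first attempt fails with `PermissionDenied`, clear the read-only bit and retry. The timestamp call itself (`fs::set_file_times`) was an unstable std API at the time and usually comes from the `filetime` crate today, so this sketch only shows the permission dance around a caller-supplied operation:

    use std::fs;
    use std::io;
    use std::path::Path;

    fn with_writable<F>(path: &Path, mut op: F) -> io::Result<()>
        where F: FnMut(&Path) -> io::Result<()>
    {
        match op(path) {
            Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => {
                // Make the file writable, then try the operation once more.
                let mut perms = fs::metadata(path)?.permissions();
                perms.set_readonly(false);
                fs::set_permissions(path, perms)?;
                op(path)
            }
            other => other,
        }
    }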
+ match fs::set_file_times(path, newtime, newtime) { + Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => {} e => return e, } - try!(fs::chmod(path, stat.perm | old_io::USER_WRITE)); - fs::change_file_times(path, newtime, newtime) + let mut perms = stat.permissions(); + perms.set_readonly(false); + try!(fs::set_permissions(path, perms)); + fs::set_file_times(path, newtime, newtime) } } } diff --git a/tests/support/registry.rs b/tests/support/registry.rs index 6749f2966..ec8fa0ab3 100644 --- a/tests/support/registry.rs +++ b/tests/support/registry.rs @@ -1,7 +1,9 @@ -use std::old_io::{self, fs, File}; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{PathBuf, Path}; -use flate2::CompressionLevel::Default; -use flate2::writer::GzEncoder; +use flate2::Compression::Default; +use flate2::write::GzEncoder; use git2; use serialize::hex::ToHex; use tar::Archive; @@ -12,19 +14,19 @@ use support::paths; use support::git::repo; use cargo::util::Sha256; -pub fn registry_path() -> Path { paths::root().join("registry") } -pub fn registry() -> Url { Url::from_file_path(®istry_path()).ok().unwrap() } -pub fn dl_path() -> Path { paths::root().join("dl") } -pub fn dl_url() -> Url { Url::from_file_path(&dl_path()).ok().unwrap() } +pub fn registry_path() -> PathBuf { paths::root().join("registry") } +pub fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } +pub fn dl_path() -> PathBuf { paths::root().join("dl") } +pub fn dl_url() -> Url { Url::from_file_path(&*dl_path()).ok().unwrap() } pub fn init() { let config = paths::home().join(".cargo/config"); - fs::mkdir_recursive(&config.dir_path(), old_io::USER_DIR).unwrap(); - File::create(&config).write_str(format!(r#" + fs::create_dir_all(config.parent().unwrap()).unwrap(); + File::create(&config).unwrap().write_all(format!(r#" [registry] index = "{reg}" token = "api-token" - "#, reg = registry()).as_slice()).unwrap(); + "#, reg = registry()).as_bytes()).unwrap(); // Init a new registry repo(®istry_path()) @@ -57,7 +59,7 @@ pub fn mock_archive(name: &str, version: &str, deps: &[(&str, &str, &str)]) { p.build(); let dst = mock_archive_dst(name, version); - fs::mkdir_recursive(&dst.dir_path(), old_io::USER_DIR).unwrap(); + fs::create_dir_all(dst.parent().unwrap()).unwrap(); let f = File::create(&dst).unwrap(); let a = Archive::new(GzEncoder::new(f, Default)); a.append(format!("{}-{}/Cargo.toml", name, version).as_slice(), @@ -67,7 +69,7 @@ pub fn mock_archive(name: &str, version: &str, deps: &[(&str, &str, &str)]) { a.finish().unwrap(); } -pub fn mock_archive_dst(name: &str, version: &str) -> Path { +pub fn mock_archive_dst(name: &str, version: &str) -> PathBuf { dl_path().join(name).join(version).join("download") } @@ -78,8 +80,10 @@ pub fn mock_pkg(name: &str, version: &str, deps: &[(&str, &str, &str)]) { pub fn mock_pkg_yank(name: &str, version: &str, deps: &[(&str, &str, &str)], yanked: bool) { mock_archive(name, version, deps); - let c = File::open(&mock_archive_dst(name, version)).read_to_end().unwrap(); - let line = pkg(name, version, deps, cksum(c.as_slice()).as_slice(), yanked); + let mut c = Vec::new(); + File::open(&mock_archive_dst(name, version)).unwrap() + .read_to_end(&mut c).unwrap(); + let line = pkg(name, version, deps, &cksum(&c), yanked); let file = match name.len() { 1 => format!("1/{}", name), @@ -95,11 +99,13 @@ pub fn publish(file: &str, line: &str) { let mut index = repo.index().unwrap(); { let dst = registry_path().join(file); - let prev = 
File::open(&dst).read_to_string().unwrap_or(String::new()); - fs::mkdir_recursive(&dst.dir_path(), old_io::USER_DIR).unwrap(); - File::create(&dst).write_str((prev + line + "\n").as_slice()).unwrap(); + let mut prev = String::new(); + let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev)); + fs::create_dir_all(dst.parent().unwrap()).unwrap(); + File::create(&dst).unwrap() + .write_all((prev + line + "\n").as_bytes()).unwrap(); } - index.add_path(&Path::new(file)).unwrap(); + index.add_path(Path::new(file)).unwrap(); index.write().unwrap(); let id = index.write_tree().unwrap(); let tree = repo.find_tree(id).unwrap(); diff --git a/tests/test_bad_config.rs b/tests/test_bad_config.rs index 9b36fd586..526245c81 100644 --- a/tests/test_bad_config.rs +++ b/tests/test_bad_config.rs @@ -1,4 +1,4 @@ -use support::{project, execs, cargo_dir}; +use support::{project, execs}; use hamcrest::assert_that; fn setup() {} @@ -99,8 +99,8 @@ test!(bad5 { foo = 2 "#); foo.build(); - assert_that(foo.process(cargo_dir().join("cargo")).arg("new") - .arg("-v").arg("foo").cwd(foo.root().join("foo")), + assert_that(foo.cargo("new") + .arg("-v").arg("foo").cwd(&foo.root().join("foo")), execs().with_status(101).with_stderr("\ Failed to create project `foo` at `[..]` diff --git a/tests/test_cargo.rs b/tests/test_cargo.rs index 64e4c758c..e0a3615b3 100644 --- a/tests/test_cargo.rs +++ b/tests/test_cargo.rs @@ -1,8 +1,8 @@ use std::env; use std::ffi::OsString; -use std::old_io::fs; -use std::old_io::{USER_RWX, File}; -use std::old_io; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; use std::str; use cargo::util::process; @@ -16,40 +16,50 @@ fn setup() { /// Add an empty file with executable flags (and platform-dependent suffix). /// TODO: move this to `ProjectBuilder` if other cases using this emerge. 
fn fake_executable(proj: ProjectBuilder, dir: &Path, name: &str) -> ProjectBuilder { - let path = proj.root().join(dir).join(format!("{}{}", name, env::consts::EXE_SUFFIX)); - mkdir_recursive(&Path::new(path.dirname())).unwrap(); - fs::File::create(&path).unwrap(); - let old_io::FileStat{perm, ..} = fs::stat(&path).unwrap(); - fs::chmod(&path, old_io::OTHER_EXECUTE | perm).unwrap(); - proj + let path = proj.root().join(dir).join(&format!("{}{}", name, + env::consts::EXE_SUFFIX)); + mkdir_recursive(path.parent().unwrap()).unwrap(); + File::create(&path).unwrap(); + make_executable(&path); + return proj; + + #[cfg(unix)] + fn make_executable(p: &Path) { + use std::os::unix::prelude::*; + + let mut perms = fs::metadata(p).unwrap().permissions(); + let mode = perms.mode(); + perms.set_mode(mode | 0o111); + fs::set_permissions(p, perms).unwrap(); + } + #[cfg(windows)] + fn make_executable(_: &Path) {} } -fn path() -> Vec<Path> { +fn path() -> Vec<PathBuf> { env::split_paths(&env::var_os("PATH").unwrap_or(OsString::new())).collect() } test!(list_commands_looks_at_path { let proj = project("list-non-overlapping"); let proj = fake_executable(proj, &Path::new("path-test"), "cargo-1"); - let pr = process(cargo_dir().join("cargo")) - .unwrap() - .cwd(proj.root()) - .env("HOME", Some(paths::home())); + let mut pr = process(&cargo_dir().join("cargo")).unwrap(); + pr.cwd(&proj.root()) + .env("HOME", &paths::home()); let mut path = path(); path.push(proj.root().join("path-test")); let path = env::join_paths(path.iter()).unwrap(); let output = pr.arg("-v").arg("--list") - .env("PATH", Some(path.to_str().unwrap())); + .env("PATH", &path); let output = output.exec_with_output().unwrap(); - let output = str::from_utf8(output.output.as_slice()).unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); assert!(output.contains("\n 1\n"), "missing 1: {}", output); }); test!(find_closest_biuld_to_build { - let pr = process(cargo_dir().join("cargo")).unwrap() - .arg("biuld").cwd(paths::root()) - .env("HOME", Some(paths::home())); + let mut pr = process(&cargo_dir().join("cargo")).unwrap(); + pr.arg("biuld").cwd(&paths::root()).env("HOME", &paths::home()); assert_that(pr, execs().with_status(127) @@ -62,9 +72,8 @@ Did you mean `build`? 
// if a subcommand is more than 3 edit distance away, we don't make a suggestion test!(find_closest_dont_correct_nonsense { - let pr = process(cargo_dir().join("cargo")).unwrap() - .arg("asdf").cwd(paths::root()) - .env("HOME", Some(paths::home())); + let mut pr = process(&cargo_dir().join("cargo")).unwrap(); + pr.arg("asdf").cwd(&paths::root()).env("HOME", &paths::home()); assert_that(pr, execs().with_status(127) @@ -75,23 +84,24 @@ test!(find_closest_dont_correct_nonsense { test!(override_cargo_home { let root = paths::root(); let my_home = root.join("my_home"); - fs::mkdir(&my_home, USER_RWX).unwrap(); - File::create(&my_home.join("config")).write_str(r#" + fs::create_dir(&my_home).unwrap(); + File::create(&my_home.join("config")).unwrap().write_all(br#" [cargo-new] name = "foo" email = "bar" git = false "#).unwrap(); - assert_that(process(cargo_dir().join("cargo")).unwrap() - .arg("new").arg("foo") - .cwd(paths::root()) - .env("USER", Some("foo")) - .env("HOME", Some(paths::home())) - .env("CARGO_HOME", Some(my_home.clone())), + assert_that(process(&cargo_dir().join("cargo")).unwrap() + .arg("new").arg("foo") + .cwd(&paths::root()) + .env("USER", "foo") + .env("HOME", &paths::home()) + .env("CARGO_HOME", &my_home), execs().with_status(0)); let toml = paths::root().join("foo/Cargo.toml"); - let toml = File::open(&toml).read_to_string().unwrap(); - assert!(toml.as_slice().contains(r#"authors = ["foo <bar>"]"#)); + let mut contents = String::new(); + File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); + assert!(contents.contains(r#"authors = ["foo <bar>"]"#)); }); diff --git a/tests/test_cargo_bench.rs b/tests/test_cargo_bench.rs index 801484854..071d44d8a 100644 --- a/tests/test_cargo_bench.rs +++ b/tests/test_cargo_bench.rs @@ -1,9 +1,8 @@ -use std::old_path; use std::str; use support::{project, execs, basic_bin_manifest, basic_lib_manifest}; -use support::{COMPILING, cargo_dir, FRESH, RUNNING}; -use support::paths::PathExt; +use support::{COMPILING, FRESH, RUNNING}; +use support::paths::CargoPathExt; use hamcrest::{assert_that, existing_file}; use cargo::util::process; @@ -11,7 +10,7 @@ fn setup() {} test!(cargo_bench_simple { let p = project("foo") - .file("Cargo.toml", basic_bin_manifest("foo").as_slice()) + .file("Cargo.toml", &basic_bin_manifest("foo").as_slice()) .file("src/foo.rs", r#" extern crate test; @@ -31,12 +30,11 @@ test!(cargo_bench_simple { assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); - assert_that( - process(p.bin("foo")).unwrap(), - execs().with_stdout("hello\n")); + assert_that(process(&p.bin("foo")).unwrap(), + execs().with_stdout("hello\n")); - assert_that(p.process(cargo_dir().join("cargo")).arg("bench"), - execs().with_stdout(format!("\ + assert_that(p.cargo("bench"), + execs().with_stdout(format!("\ {} foo v0.5.0 ({}) {} target[..]release[..]foo-[..] 
@@ -141,7 +139,7 @@ test!(many_similar_names { "#); let output = p.cargo_process("bench").exec_with_output().unwrap(); - let output = str::from_utf8(output.output.as_slice()).unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); assert!(output.contains("test bin_bench"), "bin_bench missing\n{}", output); assert!(output.contains("test lib_bench"), "lib_bench missing\n{}", output); assert!(output.contains("test bench_bench"), "bench_bench missing\n{}", output); @@ -169,24 +167,23 @@ test!(cargo_bench_failing_test { assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); - assert_that( - process(p.bin("foo")).unwrap(), - execs().with_stdout("hello\n")); + assert_that(process(&p.bin("foo")).unwrap(), + execs().with_stdout("hello\n")); - assert_that(p.process(cargo_dir().join("cargo")).arg("bench"), - execs().with_stdout(format!("\ + assert_that(p.cargo("bench"), + execs().with_stdout(format!("\ {} foo v0.5.0 ({}) {} target[..]release[..]foo-[..] running 1 test test bench_hello ... ", COMPILING, p.url(), RUNNING)) - .with_stderr(format!("\ + .with_stderr("\ thread '
<main>' panicked at 'assertion failed: \ `(left == right) && (right == left)` (left: \ - `\"hello\"`, right: `\"nope\"`)', src{sep}foo.rs:14 + `\"hello\"`, right: `\"nope\"`)', src[..]foo.rs:14 -", sep = old_path::SEP)) +") .with_status(101)); }); @@ -472,7 +469,7 @@ test!(cargo_bench_twice { p.cargo_process("build"); for _ in range(0, 2) { - assert_that(p.process(cargo_dir().join("cargo")).arg("bench"), + assert_that(p.cargo("bench"), execs().with_status(0)); } }); @@ -639,7 +636,7 @@ test!(bin_there_for_integration { "#); let output = p.cargo_process("bench").exec_with_output().unwrap(); - let output = str::from_utf8(output.output.as_slice()).unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); assert!(output.contains("main_bench ... bench: 0 ns/iter (+/- 0)"), "no main_bench\n{}", output); @@ -720,7 +717,7 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured compiling = COMPILING, running = RUNNING, dir = p.url()).as_slice())); p.root().move_into_the_past().unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("bench").arg("-v"), + assert_that(p.cargo("bench").arg("-v"), execs().with_status(0) .with_stdout(format!("\ {fresh} bar v0.0.1 ({dir}) @@ -775,7 +772,7 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured compiling = COMPILING, running = RUNNING, dir = p.url()).as_slice())); - assert_that(p.process(cargo_dir().join("cargo")).arg("bench"), + assert_that(p.cargo("bench"), execs().with_status(0) .with_stdout(format!("\ {running} target[..]release[..]foo-[..] @@ -842,18 +839,18 @@ test!(bench_with_examples { execs().with_status(0) .with_stdout(format!("\ {compiling} testbench v6.6.6 ({url}) -{running} `rustc src{sep}lib.rs --crate-name testbench --crate-type lib [..]` -{running} `rustc src{sep}lib.rs --crate-name testbench --crate-type lib [..]` -{running} `rustc benches{sep}testb1.rs --crate-name testb1 --crate-type bin \ +{running} `rustc src[..]lib.rs --crate-name testbench --crate-type lib [..]` +{running} `rustc src[..]lib.rs --crate-name testbench --crate-type lib [..]` +{running} `rustc benches[..]testb1.rs --crate-name testb1 --crate-type bin \ [..] --test [..]` -{running} `{dir}{sep}target{sep}release{sep}testb1-[..] --bench` +{running} `{dir}[..]target[..]release[..]testb1-[..] --bench` running 1 test test bench_bench2 ... bench: 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured -{running} `{dir}{sep}target{sep}release{sep}testbench-[..] --bench` +{running} `{dir}[..]target[..]release[..]testbench-[..] --bench` running 1 test test bench_bench1 ... bench: 0 ns/iter (+/- 0) @@ -864,6 +861,5 @@ test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured compiling = COMPILING, running = RUNNING, dir = p.root().display(), - url = p.url(), - sep = old_path::SEP).as_slice())); + url = p.url()).as_slice())); }); diff --git a/tests/test_cargo_build_lib.rs b/tests/test_cargo_build_lib.rs index ee0842b33..21536d25f 100644 --- a/tests/test_cargo_build_lib.rs +++ b/tests/test_cargo_build_lib.rs @@ -45,7 +45,7 @@ test!(build_lib_only { test!(build_with_no_lib { let p = project("foo") - .file("Cargo.toml", basic_bin_manifest("foo")) + .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", r#" fn main() {} "#); diff --git a/tests/test_cargo_clean.rs b/tests/test_cargo_clean.rs index ca6b8a17f..392319873 100644 --- a/tests/test_cargo_clean.rs +++ b/tests/test_cargo_clean.rs @@ -1,4 +1,4 @@ -use support::{project, execs, main_file, basic_bin_manifest, cargo_dir}; +use support::{project, execs, main_file, basic_bin_manifest}; use hamcrest::{assert_that, existing_dir, is_not}; fn setup() { @@ -6,28 +6,27 @@ fn setup() { test!(cargo_clean_simple { let p = project("foo") - .file("Cargo.toml", basic_bin_manifest("foo").as_slice()) - .file("src/foo.rs", main_file(r#""i am foo""#, &[]).as_slice()); + .file("Cargo.toml", &basic_bin_manifest("foo").as_slice()) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[]).as_slice()); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(&p.build_dir(), existing_dir()); - assert_that(p.process(cargo_dir().join("cargo")).arg("clean"), + assert_that(p.cargo("clean"), execs().with_status(0)); assert_that(&p.build_dir(), is_not(existing_dir())); }); test!(different_dir { let p = project("foo") - .file("Cargo.toml", basic_bin_manifest("foo").as_slice()) - .file("src/foo.rs", main_file(r#""i am foo""#, &[]).as_slice()) + .file("Cargo.toml", &basic_bin_manifest("foo").as_slice()) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[]).as_slice()) .file("src/bar/a.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(&p.build_dir(), existing_dir()); - assert_that(p.process(cargo_dir().join("cargo")).arg("clean") - .cwd(p.root().join("src")), + assert_that(p.cargo("clean").cwd(&p.root().join("src")), execs().with_status(0).with_stdout("")); assert_that(&p.build_dir(), is_not(existing_dir())); }); diff --git a/tests/test_cargo_compile.rs b/tests/test_cargo_compile.rs index 9c5b93435..ae0022821 100644 --- a/tests/test_cargo_compile.rs +++ b/tests/test_cargo_compile.rs @@ -1,11 +1,11 @@ -use std::old_io::{self, fs, TempDir, File}; use std::env; -use std::old_path; +use std::fs::{self, TempDir, File}; +use std::io::prelude::*; use support::{project, execs, main_file, basic_bin_manifest}; -use support::{COMPILING, RUNNING, cargo_dir, ProjectBuilder}; +use support::{COMPILING, RUNNING, ProjectBuilder}; use hamcrest::{assert_that, existing_file}; -use support::paths::PathExt; +use support::paths::CargoPathExt; use cargo::util::process; fn setup() { @@ -13,25 +13,24 @@ fn setup() { test!(cargo_compile_simple { let p = project("foo") - .file("Cargo.toml", basic_bin_manifest("foo").as_slice()) - .file("src/foo.rs", main_file(r#""i am foo""#, &[]).as_slice()); + .file("Cargo.toml", &basic_bin_manifest("foo").as_slice()) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[]).as_slice()); assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); - assert_that( - process(p.bin("foo")).unwrap(), - execs().with_stdout("i am foo\n")); + assert_that(process(&p.bin("foo")).unwrap(), + execs().with_stdout("i 
am foo\n")); }); test!(cargo_compile_manifest_path { let p = project("foo") - .file("Cargo.toml", basic_bin_manifest("foo").as_slice()) - .file("src/foo.rs", main_file(r#""i am foo""#, &[]).as_slice()); + .file("Cargo.toml", &basic_bin_manifest("foo").as_slice()) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[]).as_slice()); assert_that(p.cargo_process("build") .arg("--manifest-path").arg("foo/Cargo.toml") - .cwd(p.root().dir_path()), + .cwd(p.root().parent().unwrap()), execs().with_status(0)); assert_that(&p.bin("foo"), existing_file()); }); @@ -116,7 +115,7 @@ Caused by: test!(cargo_compile_without_manifest { let tmpdir = TempDir::new("cargo").unwrap(); - let p = ProjectBuilder::new("foo", tmpdir.path().clone()); + let p = ProjectBuilder::new("foo", tmpdir.path().to_path_buf()); assert_that(p.cargo_process("build"), execs().with_status(101) @@ -133,14 +132,13 @@ test!(cargo_compile_with_invalid_code { assert_that(p.cargo_process("build"), execs() .with_status(101) - .with_stderr(format!("\ -{filename}:1:1: 1:8 error: expected item[..]found `invalid` -{filename}:1 invalid rust code! + .with_stderr("\ +src[..]foo.rs:1:1: 1:8 error: expected item[..]found `invalid` +src[..]foo.rs:1 invalid rust code! ^~~~~~~ Could not compile `foo`. -To learn more, run the command again with --verbose.\n", - filename = format!("src{}foo.rs", old_path::SEP)).as_slice())); +To learn more, run the command again with --verbose.\n")); assert_that(&p.root().join("Cargo.lock"), existing_file()); }); @@ -176,12 +174,12 @@ test!(cargo_compile_with_warnings_in_the_root_package { assert_that(p.cargo_process("build"), execs() - .with_stderr(format!("\ -{filename}:1:14: 1:26 warning: function is never used: `dead`, #[warn(dead_code)] \ -on by default -{filename}:1 fn main() {{}} fn dead() {{}} - ^~~~~~~~~~~~ -", filename = format!("src{}foo.rs", old_path::SEP).as_slice()))); + .with_stderr("\ +src[..]foo.rs:1:14: 1:26 warning: function is never used: `dead`, \ + #[warn(dead_code)] on by default +src[..]foo.rs:1 fn main() {} fn dead() {} +[..] 
^~~~~~~~~~~~ +")); }); test!(cargo_compile_with_warnings_in_a_dep_package { @@ -238,7 +236,7 @@ test!(cargo_compile_with_warnings_in_a_dep_package { assert_that(&p.bin("foo"), existing_file()); assert_that( - process(p.bin("foo")).unwrap(), + process(&p.bin("foo")).unwrap(), execs().with_stdout("test passed\n")); }); @@ -296,7 +294,7 @@ test!(cargo_compile_with_nested_deps_inferred { assert_that(&p.bin("foo"), existing_file()); assert_that( - process(p.bin("foo")).unwrap(), + process(&p.bin("foo")).unwrap(), execs().with_stdout("test passed\n")); }); @@ -354,7 +352,7 @@ test!(cargo_compile_with_nested_deps_correct_bin { assert_that(&p.bin("foo"), existing_file()); assert_that( - process(p.bin("foo")).unwrap(), + process(&p.bin("foo")).unwrap(), execs().with_stdout("test passed\n")); }); @@ -421,7 +419,7 @@ test!(cargo_compile_with_nested_deps_shorthand { assert_that(&p.bin("foo"), existing_file()); assert_that( - process(p.bin("foo")).unwrap(), + process(&p.bin("foo")).unwrap(), execs().with_stdout("test passed\n")); }); @@ -487,9 +485,8 @@ test!(cargo_compile_with_nested_deps_longhand { assert_that(&p.bin("foo"), existing_file()); - assert_that( - process(p.bin("foo")).unwrap(), - execs().with_stdout("test passed\n")); + assert_that(process(&p.bin("foo")).unwrap(), + execs().with_stdout("test passed\n")); }); // Check that Cargo gives a sensible error if a dependency can't be found @@ -545,14 +542,14 @@ test!(compile_path_dep_then_change_version { assert_that(p.cargo_process("build"), execs().with_status(0)); - File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_str(r#" + File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#" [package] name = "bar" version = "0.0.2" authors = [] "#).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(101).with_stderr("\ no matching package named `bar` found (required by `foo`) location searched: [..] 
@@ -604,12 +601,11 @@ test!(crate_version_env_vars { assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that( - process(p.bin("foo")).unwrap(), - execs().with_stdout(format!("0-5-1 @ alpha.1 in {}\n", - p.root().display()).as_slice())); + assert_that(process(&p.bin("foo")).unwrap(), + execs().with_stdout(format!("0-5-1 @ alpha.1 in {}\n", + p.root().display()).as_slice())); - assert_that(p.process(cargo_dir().join("cargo")).arg("test"), + assert_that(p.cargo("test"), execs().with_status(0)); }); @@ -635,9 +631,9 @@ test!(many_crate_types_old_style_lib_location { assert_that(p.cargo_process("build"), execs().with_status(0)); - let files = fs::readdir(&p.root().join("target")).unwrap(); - let mut files: Vec<String> = files.iter().filter_map(|f| { - match f.filename_str().unwrap() { + let files = fs::read_dir(&p.root().join("target")).unwrap(); + let mut files: Vec<String> = files.map(|e| e.unwrap().path()).filter_map(|f| { + match f.file_name().unwrap().to_str().unwrap() { "build" | "examples" | "deps" => None, s if s.contains("fingerprint") || s.contains("dSYM") => None, s => Some(s.to_string()) @@ -673,9 +669,9 @@ test!(many_crate_types_correct { assert_that(p.cargo_process("build"), execs().with_status(0)); - let files = fs::readdir(&p.root().join("target")).unwrap(); - let mut files: Vec<String> = files.iter().filter_map(|f| { - match f.filename_str().unwrap() { + let files = fs::read_dir(&p.root().join("target")).unwrap(); + let mut files: Vec<String> = files.map(|f| f.unwrap().path()).filter_map(|f| { + match f.file_name().unwrap().to_str().unwrap() { "build" | "examples" | "deps" => None, s if s.contains("fingerprint") || s.contains("dSYM") => None, s => Some(s.to_string()) @@ -772,9 +768,8 @@ test!(ignore_broken_symlinks { assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); - assert_that( - process(p.bin("foo")).unwrap(), - execs().with_stdout("i am foo\n")); + assert_that(process(&p.bin("foo")).unwrap(), + execs().with_stdout("i am foo\n")); }); test!(missing_lib_and_bin { @@ -813,16 +808,16 @@ test!(lto_build { assert_that(p.cargo_process("build").arg("-v").arg("--release"), execs().with_status(0).with_stdout(format!("\ {compiling} test v0.0.0 ({url}) -{running} `rustc src{sep}main.rs --crate-name test --crate-type bin \ +{running} `rustc src[..]main.rs --crate-name test --crate-type bin \ -C opt-level=3 \ -C lto \ --cfg ndebug \ - --out-dir {dir}{sep}target{sep}release \ + --out-dir {dir}[..]target[..]release \ --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}release \ - -L dependency={dir}{sep}target{sep}release{sep}deps + -L dependency={dir}[..]target[..]release \ + -L dependency={dir}[..]target[..]release[..]deps` ", -running = RUNNING, compiling = COMPILING, sep = old_path::SEP, +running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), ))); @@ -842,15 +837,15 @@ test!(verbose_build { assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0).with_stdout(format!("\ {compiling} test v0.0.0 ({url}) -{running} `rustc src{sep}lib.rs --crate-name test --crate-type lib -g \ +{running} `rustc src[..]lib.rs --crate-name test --crate-type lib -g \ -C metadata=[..] \ -C extra-filename=-[..] 
\ - --out-dir {dir}{sep}target \ + --out-dir {dir}[..]target \ --emit=dep-info,link \ - -L dependency={dir}{sep}target \ - -L dependency={dir}{sep}target{sep}deps` + -L dependency={dir}[..]target \ + -L dependency={dir}[..]target[..]deps` ", -running = RUNNING, compiling = COMPILING, sep = old_path::SEP, +running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), ))); @@ -870,17 +865,17 @@ test!(verbose_release_build { assert_that(p.cargo_process("build").arg("-v").arg("--release"), execs().with_status(0).with_stdout(format!("\ {compiling} test v0.0.0 ({url}) -{running} `rustc src{sep}lib.rs --crate-name test --crate-type lib \ +{running} `rustc src[..]lib.rs --crate-name test --crate-type lib \ -C opt-level=3 \ --cfg ndebug \ -C metadata=[..] \ -C extra-filename=-[..] \ - --out-dir {dir}{sep}target{sep}release \ + --out-dir {dir}[..]target[..]release \ --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}release \ - -L dependency={dir}{sep}target{sep}release{sep}deps` + -L dependency={dir}[..]target[..]release \ + -L dependency={dir}[..]target[..]release[..]deps` ", -running = RUNNING, compiling = COMPILING, sep = old_path::SEP, +running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), ))); @@ -915,35 +910,34 @@ test!(verbose_release_build_deps { assert_that(p.cargo_process("build").arg("-v").arg("--release"), execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.0 ({url}) -{running} `rustc foo{sep}src{sep}lib.rs --crate-name foo \ +{running} `rustc foo[..]src[..]lib.rs --crate-name foo \ --crate-type dylib --crate-type rlib -C prefer-dynamic \ -C opt-level=3 \ --cfg ndebug \ -C metadata=[..] \ -C extra-filename=-[..] \ - --out-dir {dir}{sep}target{sep}release{sep}deps \ + --out-dir {dir}[..]target[..]release[..]deps \ --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}release{sep}deps \ - -L dependency={dir}{sep}target{sep}release{sep}deps` + -L dependency={dir}[..]target[..]release[..]deps \ + -L dependency={dir}[..]target[..]release[..]deps` {compiling} test v0.0.0 ({url}) -{running} `rustc src{sep}lib.rs --crate-name test --crate-type lib \ +{running} `rustc src[..]lib.rs --crate-name test --crate-type lib \ -C opt-level=3 \ --cfg ndebug \ -C metadata=[..] \ -C extra-filename=-[..] 
\ - --out-dir {dir}{sep}target{sep}release \ + --out-dir {dir}[..]target[..]release \ --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}release \ - -L dependency={dir}{sep}target{sep}release{sep}deps \ - --extern foo={dir}{sep}target{sep}release{sep}deps{sep}\ + -L dependency={dir}[..]target[..]release \ + -L dependency={dir}[..]target[..]release[..]deps \ + --extern foo={dir}[..]target[..]release[..]deps[..]\ {prefix}foo-[..]{suffix} \ - --extern foo={dir}{sep}target{sep}release{sep}deps{sep}libfoo-[..].rlib` + --extern foo={dir}[..]target[..]release[..]deps[..]libfoo-[..].rlib` ", running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), - sep = old_path::SEP, prefix = env::consts::DLL_PREFIX, suffix = env::consts::DLL_SUFFIX).as_slice())); }); @@ -983,9 +977,9 @@ test!(explicit_examples { "#); assert_that(p.cargo_process("test"), execs().with_status(0)); - assert_that(process(p.bin("examples/hello")).unwrap(), + assert_that(process(&p.bin("examples/hello")).unwrap(), execs().with_stdout("Hello, World!\n")); - assert_that(process(p.bin("examples/goodbye")).unwrap(), + assert_that(process(&p.bin("examples/goodbye")).unwrap(), execs().with_stdout("Goodbye, World!\n")); }); @@ -1004,23 +998,27 @@ test!(implicit_examples { "#) .file("examples/hello.rs", r#" extern crate world; - fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); } + fn main() { + println!("{}, {}!", world::get_hello(), world::get_world()); + } "#) .file("examples/goodbye.rs", r#" extern crate world; - fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); } + fn main() { + println!("{}, {}!", world::get_goodbye(), world::get_world()); + } "#); assert_that(p.cargo_process("test"), execs().with_status(0)); - assert_that(process(p.bin("examples/hello")).unwrap(), + assert_that(process(&p.bin("examples/hello")).unwrap(), execs().with_stdout("Hello, World!\n")); - assert_that(process(p.bin("examples/goodbye")).unwrap(), + assert_that(process(&p.bin("examples/goodbye")).unwrap(), execs().with_stdout("Goodbye, World!\n")); }); test!(standard_build_no_ndebug { let p = project("world") - .file("Cargo.toml", basic_bin_manifest("foo")) + .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", r#" fn main() { if cfg!(ndebug) { @@ -1032,12 +1030,13 @@ test!(standard_build_no_ndebug { "#); assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(process(p.bin("foo")).unwrap(), execs().with_stdout("slow\n")); + assert_that(process(&p.bin("foo")).unwrap(), + execs().with_stdout("slow\n")); }); test!(release_build_ndebug { let p = project("world") - .file("Cargo.toml", basic_bin_manifest("foo")) + .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", r#" fn main() { if cfg!(ndebug) { @@ -1050,7 +1049,8 @@ test!(release_build_ndebug { assert_that(p.cargo_process("build").arg("--release"), execs().with_status(0)); - assert_that(process(p.bin("release/foo")).unwrap(), execs().with_stdout("fast\n")); + assert_that(process(&p.bin("release/foo")).unwrap(), + execs().with_stdout("fast\n")); }); test!(inferred_main_bin { @@ -1066,7 +1066,7 @@ test!(inferred_main_bin { "#); assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(process(p.bin("foo")).unwrap(), execs().with_status(0)); + assert_that(process(&p.bin("foo")).unwrap(), execs().with_status(0)); }); test!(deletion_causes_failure { @@ -1116,7 +1116,7 @@ test!(bad_cargo_toml_in_target_dir { .file("target/Cargo.toml", "bad-toml"); 
assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(process(p.bin("foo")).unwrap(), execs().with_status(0)); + assert_that(process(&p.bin("foo")).unwrap(), execs().with_status(0)); }); test!(lib_with_standard_name { @@ -1263,7 +1263,7 @@ test!(freshness_ignores_excluded { foo.build(); foo.root().move_into_the_past().unwrap(); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build"), + assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout(format!("\ {compiling} foo v0.0.0 ({url}) @@ -1271,14 +1271,14 @@ test!(freshness_ignores_excluded { // Smoke test to make sure it doesn't compile again println!("first pass"); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build"), + assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout("")); // Modify an ignored file and make sure we don't rebuild println!("second pass"); File::create(&foo.root().join("src/bar.rs")).unwrap(); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build"), + assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout("")); }); @@ -1309,15 +1309,14 @@ test!(rebuild_preserves_out_dir { foo.build(); foo.root().move_into_the_past().unwrap(); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build") - .env("FIRST", Some("1")), + assert_that(foo.cargo("build").env("FIRST", "1"), execs().with_status(0) .with_stdout(format!("\ {compiling} foo v0.0.0 ({url}) ", compiling = COMPILING, url = foo.url()))); File::create(&foo.root().join("src/bar.rs")).unwrap(); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build"), + assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout(format!("\ {compiling} foo v0.0.0 ({url}) @@ -1364,11 +1363,13 @@ test!(recompile_space_in_name { .file("src/my lib.rs", ""); assert_that(foo.cargo_process("build"), execs().with_status(0)); foo.root().move_into_the_past().unwrap(); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build"), + assert_that(foo.cargo("build"), execs().with_status(0).with_stdout("")); }); +#[cfg(unix)] test!(ignore_bad_directories { + use std::os::unix::prelude::*; let foo = project("foo") .file("Cargo.toml", r#" [package] @@ -1378,10 +1379,16 @@ test!(ignore_bad_directories { "#) .file("src/lib.rs", ""); foo.build(); - fs::mkdir(&foo.root().join("tmp"), old_io::USER_EXEC ^ old_io::USER_EXEC).unwrap(); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build"), + let dir = foo.root().join("tmp"); + fs::create_dir(&dir).unwrap(); + let stat = fs::metadata(&dir).unwrap(); + let mut perms = stat.permissions(); + perms.set_mode(0o644); + fs::set_permissions(&dir, perms.clone()).unwrap(); + assert_that(foo.cargo("build"), execs().with_status(0)); - fs::chmod(&foo.root().join("tmp"), old_io::USER_DIR).unwrap(); + perms.set_mode(0o755); + fs::set_permissions(&dir, perms).unwrap(); }); test!(bad_cargo_config { @@ -1446,9 +1453,8 @@ test!(cargo_platform_specific_dependency { assert_that(&p.bin("foo"), existing_file()); - assert_that( - process(p.bin("foo")).unwrap(), - execs().with_stdout("test passed\n")); + assert_that(process(&p.bin("foo")).unwrap(), + execs().with_stdout("test passed\n")); }); #[cfg(not(all(any(target_arch = "x86", target_arch = "x86_64"), target_os = "linux")))] @@ -1513,19 +1519,16 @@ test!(cargo_platform_specific_dependency_wrong_platform { invalid rust file, should not be compiled "#); - p.cargo_process("build") - .exec_with_output() - .unwrap(); + p.cargo_process("build").exec_with_output().unwrap(); assert_that(&p.bin("foo"), 
existing_file()); + assert_that(process(&p.bin("foo")).unwrap(), + execs()); - assert_that( - process(p.bin("foo")).unwrap(), - execs()); - - let lockfile = p.root().join("Cargo.lock"); - let lockfile = File::open(&lockfile).read_to_string().unwrap(); - assert!(lockfile.as_slice().contains("bar")) + let loc = p.root().join("Cargo.lock"); + let mut lockfile = String::new(); + File::open(&loc).unwrap().read_to_string(&mut lockfile).unwrap(); + assert!(lockfile.contains("bar")) }); test!(example_bin_same_name { @@ -1546,7 +1549,7 @@ test!(example_bin_same_name { assert_that(&p.bin("foo"), existing_file()); assert_that(&p.bin("examples/foo"), existing_file()); - p.process(cargo_dir().join("cargo")).arg("test").arg("--no-run") + p.cargo("test").arg("--no-run") .exec_with_output() .unwrap(); @@ -1572,8 +1575,8 @@ test!(compile_then_delete { use std::time::duration::Duration; sleep(Duration::milliseconds(100)); } - fs::unlink(&p.bin("foo")).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("run"), + fs::remove_file(&p.bin("foo")).unwrap(); + assert_that(p.cargo("run"), execs().with_status(0)); }); diff --git a/tests/test_cargo_compile_custom_build.rs b/tests/test_cargo_compile_custom_build.rs index c356812fd..5ef4c238f 100644 --- a/tests/test_cargo_compile_custom_build.rs +++ b/tests/test_cargo_compile_custom_build.rs @@ -1,9 +1,10 @@ use std::env; -use std::old_io::{File, fs}; +use std::fs::{self, File}; +use std::io::prelude::*; -use support::{project, execs, cargo_dir}; +use support::{project, execs}; use support::{COMPILING, RUNNING, DOCTEST}; -use support::paths::PathExt; +use support::paths::CargoPathExt; use hamcrest::{assert_that}; fn setup() { @@ -38,7 +39,8 @@ test!(custom_build_script_failed { url = p.url(), compiling = COMPILING, running = RUNNING)) .with_stderr(format!("\ failed to run custom build command for `foo v0.5.0 ({})` -Process didn't exit successfully: `[..]build[..]build-script-build[..]` (status=101)", +Process didn't exit successfully: `[..]build[..]build-script-build[..]` \ + (exit code: 101)", p.url()))); }); @@ -77,30 +79,26 @@ test!(custom_build_env_vars { let file_content = format!(r#" use std::env; - use std::old_io::fs::PathExtensions; + use std::io::prelude::*; + use std::path::Path; + fn main() {{ let _target = env::var("TARGET").unwrap(); - let _ncpus = env::var("NUM_JOBS").unwrap(); - - let out = env::var("CARGO_MANIFEST_DIR").unwrap(); - let p1 = Path::new(out); - let cwd = env::current_dir().unwrap(); - let p2 = cwd.join(Path::new(file!()).dir_path().dir_path()); - assert!(p1 == p2, "{{}} != {{}}", p1.display(), p2.display()); + let _dir = env::var("CARGO_MANIFEST_DIR").unwrap(); let opt = env::var("OPT_LEVEL").unwrap(); - assert_eq!(opt.as_slice(), "0"); + assert_eq!(opt, "0"); let opt = env::var("PROFILE").unwrap(); - assert_eq!(opt.as_slice(), "compile"); + assert_eq!(opt, "compile"); let debug = env::var("DEBUG").unwrap(); - assert_eq!(debug.as_slice(), "true"); + assert_eq!(debug, "true"); let out = env::var("OUT_DIR").unwrap(); - assert!(out.as_slice().starts_with(r"{0}")); - assert!(Path::new(out).is_dir()); + assert!(out.starts_with(r"{0}")); + assert!(Path::new(&out).is_dir()); let _host = env::var("HOST").unwrap(); @@ -109,7 +107,7 @@ test!(custom_build_env_vars { "#, p.root().join("target").join("build").display()); - let p = p.file("bar/build.rs", file_content); + let p = p.file("bar/build.rs", &file_content); assert_that(p.cargo_process("build").arg("--features").arg("bar_feat"), @@ -397,7 +395,7 @@ test!(only_rerun_build_script { 
File::create(&p.root().join("some-new-file")).unwrap(); p.root().move_into_the_past().unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build").arg("-v"), + assert_that(p.cargo("build").arg("-v"), execs().with_status(0) .with_stdout(format!("\ {compiling} foo v0.5.0 (file://[..]) @@ -427,7 +425,7 @@ test!(rebuild_continues_to_pass_env_vars { a.root().move_into_the_past().unwrap(); let p = project("foo") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" @@ -453,7 +451,7 @@ test!(rebuild_continues_to_pass_env_vars { File::create(&p.root().join("some-new-file")).unwrap(); p.root().move_into_the_past().unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build").arg("-v"), + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); }); @@ -478,7 +476,7 @@ test!(testing_and_such { File::create(&p.root().join("src/lib.rs")).unwrap(); p.root().move_into_the_past().unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("test").arg("-vj1"), + assert_that(p.cargo("test").arg("-vj1"), execs().with_status(0) .with_stdout(format!("\ {compiling} foo v0.5.0 (file://[..]) @@ -500,7 +498,7 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, doctest = DOCTEST).as_slice())); - assert_that(p.process(cargo_dir().join("cargo")).arg("doc").arg("-v"), + assert_that(p.cargo("doc").arg("-v"), execs().with_status(0) .with_stdout(format!("\ {compiling} foo v0.5.0 (file://[..]) @@ -508,8 +506,9 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured {running} `rustc [..]` ", compiling = COMPILING, running = RUNNING).as_slice())); - File::create(&p.root().join("src/main.rs")).write_str("fn main() {}").unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("run"), + File::create(&p.root().join("src/main.rs")).unwrap() + .write_all(b"fn main() {}").unwrap(); + assert_that(p.cargo("run"), execs().with_status(0) .with_stdout(format!("\ {compiling} foo v0.5.0 (file://[..]) @@ -636,7 +635,7 @@ test!(build_deps_not_for_normal { "#) .file("a/src/lib.rs", ""); - assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(target), + assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), execs().with_status(101) .with_stderr("\ [..]lib.rs[..] error: can't find crate for `aaaaa` @@ -738,7 +737,7 @@ test!(out_dir_is_preserved { p.root().move_into_the_past().unwrap(); // Change to asserting that it's there - File::create(&p.root().join("build.rs")).write_str(r#" + File::create(&p.root().join("build.rs")).unwrap().write_all(br#" use std::env; use std::old_io::File; fn main() { @@ -747,16 +746,16 @@ test!(out_dir_is_preserved { } "#).unwrap(); p.root().move_into_the_past().unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build").arg("-v"), + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); // Run a fresh build where file should be preserved - assert_that(p.process(cargo_dir().join("cargo")).arg("build").arg("-v"), + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); // One last time to make sure it's still there. 
File::create(&p.root().join("foo")).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build").arg("-v"), + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); }); @@ -1026,12 +1025,12 @@ test!(build_script_with_dynamic_native_dependency { assert_that(build.cargo_process("build"), execs().with_status(0).with_stderr("")); let src = build.root().join("target"); - let lib = fs::readdir(&src).unwrap().into_iter().find(|lib| { - let lib = lib.filename_str().unwrap(); + let lib = fs::read_dir(&src).unwrap().map(|s| s.unwrap().path()).find(|lib| { + let lib = lib.file_name().unwrap().to_str().unwrap(); lib.starts_with(env::consts::DLL_PREFIX) && lib.ends_with(env::consts::DLL_SUFFIX) }).unwrap(); - let libname = lib.filename_str().unwrap(); + let libname = lib.file_name().unwrap().to_str().unwrap(); let libname = &libname[env::consts::DLL_PREFIX.len().. libname.len() - env::consts::DLL_SUFFIX.len()]; @@ -1066,7 +1065,7 @@ test!(build_script_with_dynamic_native_dependency { println!("cargo:rustc-flags=-L {}", src.dir_path().display()); } "#) - .file("bar/src/lib.rs", format!(r#" + .file("bar/src/lib.rs", &format!(r#" pub fn bar() {{ #[link(name = "{}")] extern {{ fn foo(); }} @@ -1074,7 +1073,7 @@ test!(build_script_with_dynamic_native_dependency { }} "#, libname)); - assert_that(foo.cargo_process("build").env("SRC", Some(lib.as_vec())), + assert_that(foo.cargo_process("build").env("SRC", &lib), execs().with_status(0)); }); diff --git a/tests/test_cargo_compile_git_deps.rs b/tests/test_cargo_compile_git_deps.rs index 799834552..c8bb12f6d 100644 --- a/tests/test_cargo_compile_git_deps.rs +++ b/tests/test_cargo_compile_git_deps.rs @@ -1,12 +1,13 @@ -use std::old_io::fs; -use std::old_io::{timer, File}; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::old_io::timer; +use std::path::Path; use std::time::Duration; use git2; -use support::{ProjectBuilder, project, execs, main_file}; -use support::{cargo_dir, path2url}; +use support::{ProjectBuilder, project, execs, main_file, path2url}; use support::{COMPILING, UPDATING, RUNNING}; -use support::paths::{self, PathExt}; +use support::paths::{self, CargoPathExt}; use hamcrest::{assert_that,existing_file}; use cargo; use cargo::util::{ProcessError, process}; @@ -42,9 +43,9 @@ fn add(repo: &git2::Repository) { } let mut index = repo.index().unwrap(); index.add_all(["*"].iter(), git2::ADD_DEFAULT, - Some((&mut (|&: a: &[u8], _b: &[u8]| { - if s.iter().any(|s| a.starts_with(s.path().as_vec())) {1} else {0} - })) as &mut git2::IndexMatchedPath)).unwrap(); + Some(&mut (|a, _b| { + if s.iter().any(|s| a.starts_with(s.path())) {1} else {0} + }))).unwrap(); index.write().unwrap(); } @@ -72,7 +73,7 @@ fn commit(repo: &git2::Repository) -> git2::Oid { let parents = parents.iter().collect::<Vec<_>>(); repo.commit(Some("HEAD"), &sig, &sig, "test", &repo.find_tree(tree_id).unwrap(), - parents.as_slice()).unwrap() + &parents).unwrap() } test!(cargo_compile_simple_git_dep { @@ -98,7 +99,7 @@ test!(cargo_compile_simple_git_dep { }).unwrap(); let project = project - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" @@ -113,7 +114,7 @@ test!(cargo_compile_simple_git_dep { name = "foo" "#, git_project.url())) - .file("src/foo.rs", main_file(r#""{}", dep1::hello()"#, &["dep1"])); + .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); let root = project.root(); let git_root = git_project.root(); @@ -131,7 +132,7 @@ test!(cargo_compile_simple_git_dep { assert_that(&project.bin("foo"), 
existing_file()); assert_that( - cargo::util::process(project.bin("foo")).unwrap(), + cargo::util::process(&project.bin("foo")).unwrap(), execs().with_stdout("hello world\n")); }); @@ -164,7 +165,7 @@ test!(cargo_compile_git_dep_branch { repo.branch("branchy", &head, true, None, None).unwrap(); let project = project - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" @@ -180,7 +181,7 @@ test!(cargo_compile_git_dep_branch { name = "foo" "#, git_project.url())) - .file("src/foo.rs", main_file(r#""{}", dep1::hello()"#, &["dep1"])); + .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); let root = project.root(); let git_root = git_project.root(); @@ -198,7 +199,7 @@ test!(cargo_compile_git_dep_branch { assert_that(&project.bin("foo"), existing_file()); assert_that( - cargo::util::process(project.bin("foo")).unwrap(), + cargo::util::process(&project.bin("foo")).unwrap(), execs().with_stdout("hello world\n")); }); @@ -234,7 +235,7 @@ test!(cargo_compile_git_dep_tag { false).unwrap(); let project = project - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" @@ -250,7 +251,7 @@ test!(cargo_compile_git_dep_tag { name = "foo" "#, git_project.url())) - .file("src/foo.rs", main_file(r#""{}", dep1::hello()"#, &["dep1"])); + .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); let root = project.root(); let git_root = git_project.root(); @@ -266,10 +267,10 @@ test!(cargo_compile_git_dep_tag { assert_that(&project.bin("foo"), existing_file()); - assert_that(cargo::util::process(project.bin("foo")).unwrap(), + assert_that(cargo::util::process(&project.bin("foo")).unwrap(), execs().with_stdout("hello world\n")); - assert_that(project.process(cargo_dir().join("cargo")).arg("build"), + assert_that(project.cargo("build"), execs().with_status(0)); }); @@ -318,7 +319,7 @@ test!(cargo_compile_with_nested_paths { }).unwrap(); let p = project("parent") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "parent" @@ -343,9 +344,8 @@ test!(cargo_compile_with_nested_paths { assert_that(&p.bin("parent"), existing_file()); - assert_that( - cargo::util::process(p.bin("parent")).unwrap(), - execs().with_stdout("hello world\n")); + assert_that(cargo::util::process(&p.bin("parent")).unwrap(), + execs().with_stdout("hello world\n")); }); test!(cargo_compile_with_meta_package { @@ -386,7 +386,7 @@ test!(cargo_compile_with_meta_package { }).unwrap(); let p = project("parent") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "parent" @@ -416,16 +416,15 @@ test!(cargo_compile_with_meta_package { assert_that(&p.bin("parent"), existing_file()); - assert_that( - cargo::util::process(p.bin("parent")).unwrap(), - execs().with_stdout("this is dep1 this is dep2\n")); + assert_that(cargo::util::process(&p.bin("parent")).unwrap(), + execs().with_stdout("this is dep1 this is dep2\n")); }); test!(cargo_compile_with_short_ssh_git { let url = "git@github.com:a/dep"; let project = project("project") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" @@ -440,7 +439,7 @@ test!(cargo_compile_with_short_ssh_git { name = "foo" "#, url)) - .file("src/foo.rs", main_file(r#""{}", dep1::hello()"#, &["dep1"])); + .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); assert_that(project.cargo_process("build"), execs() @@ -468,14 +467,14 @@ test!(two_revs_same_deps { let rev1 = 
repo.revparse_single("HEAD").unwrap().id(); // Commit the changes and make sure we trigger a recompile - File::create(&bar.root().join("src/lib.rs")).write_str(r#" + File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" pub fn bar() -> i32 { 2 } "#).unwrap(); add(&repo); let rev2 = commit(&repo); let foo = project("foo") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.0.0" @@ -499,7 +498,7 @@ test!(two_revs_same_deps { "#); let baz = project("baz") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [package] name = "baz" version = "0.0.0" @@ -519,7 +518,7 @@ test!(two_revs_same_deps { assert_that(foo.cargo_process("build"), execs().with_status(0)); assert_that(&foo.bin("foo"), existing_file()); - assert_that(foo.process(foo.bin("foo")), execs().with_status(0)); + assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); }); test!(recompilation { @@ -541,7 +540,7 @@ test!(recompilation { }).unwrap(); let p = project("foo") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" @@ -570,23 +569,23 @@ test!(recompilation { COMPILING, p.url()))); // Don't recompile the second time - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout("")); // Modify a file manually, shouldn't trigger a recompile - File::create(&git_project.root().join("src/bar.rs")).write_str(r#" + File::create(&git_project.root().join("src/bar.rs")).unwrap().write_all(br#" pub fn bar() { println!("hello!"); } "#).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout("")); - assert_that(p.process(cargo_dir().join("cargo")).arg("update"), + assert_that(p.cargo("update"), execs().with_stdout(format!("{} git repository `{}`", UPDATING, git_project.url()))); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout("")); // Commit the changes and make sure we don't trigger a recompile because the @@ -596,27 +595,27 @@ test!(recompilation { commit(&repo); println!("compile after commit"); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout("")); p.root().move_into_the_past().unwrap(); // Update the dependency and carry on! 
- assert_that(p.process(cargo_dir().join("cargo")).arg("update"), + assert_that(p.cargo("update"), execs().with_stdout(format!("{} git repository `{}`", UPDATING, git_project.url()))); println!("going for the last compile"); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout(format!("{} bar v0.5.0 ({}#[..])\n\ {} foo v0.5.0 ({})\n", COMPILING, git_project.url(), COMPILING, p.url()))); // Make sure clean only cleans one dep - assert_that(p.process(cargo_dir().join("cargo")).arg("clean") + assert_that(p.cargo("clean") .arg("-p").arg("foo"), execs().with_stdout("")); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout(format!("{} foo v0.5.0 ({})\n", COMPILING, p.url()))); }); @@ -656,7 +655,7 @@ test!(update_with_shared_deps { extern crate dep2; fn main() {} "#) - .file("dep1/Cargo.toml", format!(r#" + .file("dep1/Cargo.toml", &format!(r#" [package] name = "dep1" version = "0.5.0" @@ -667,7 +666,7 @@ test!(update_with_shared_deps { git = '{}' "#, git_project.url())) .file("dep1/src/lib.rs", "") - .file("dep2/Cargo.toml", format!(r#" + .file("dep2/Cargo.toml", &format!(r#" [package] name = "dep2" version = "0.5.0" @@ -691,7 +690,7 @@ test!(update_with_shared_deps { compiling = COMPILING, dir = p.url()))); // Modify a file manually, and commit it - File::create(&git_project.root().join("src/bar.rs")).write_str(r#" + File::create(&git_project.root().join("src/bar.rs")).unwrap().write_all(br#" pub fn bar() { println!("hello!"); } "#).unwrap(); let repo = git2::Repository::open(&git_project.root()).unwrap(); @@ -703,21 +702,21 @@ test!(update_with_shared_deps { // By default, not transitive updates println!("dep1 update"); - assert_that(p.process(cargo_dir().join("cargo")).arg("update") + assert_that(p.cargo("update") .arg("-p").arg("dep1"), execs().with_stdout("")); // Specifying a precise rev to the old rev shouldn't actually update // anything because we already have the rev in the db. println!("bar precise update"); - assert_that(p.process(cargo_dir().join("cargo")).arg("update") + assert_that(p.cargo("update") .arg("-p").arg("bar") - .arg("--precise").arg(old_head.to_string()), + .arg("--precise").arg(&old_head.to_string()), execs().with_stdout("")); // Updating aggressively should, however, update the repo. println!("dep1 aggressive update"); - assert_that(p.process(cargo_dir().join("cargo")).arg("update") + assert_that(p.cargo("update") .arg("-p").arg("dep1") .arg("--aggressive"), execs().with_stdout(format!("{} git repository `{}`", @@ -726,7 +725,7 @@ test!(update_with_shared_deps { // Make sure we still only compile one version of the git repo println!("build"); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout(format!("\ {compiling} bar v0.5.0 ({git}#[..]) {compiling} [..] 
v0.5.0 ({dir}) @@ -736,8 +735,7 @@ test!(update_with_shared_deps { compiling = COMPILING, dir = p.url()))); // We should be able to update transitive deps - assert_that(p.process(cargo_dir().join("cargo")).arg("update") - .arg("-p").arg("bar"), + assert_that(p.cargo("update").arg("-p").arg("bar"), execs().with_stdout(format!("{} git repository `{}`", UPDATING, git_project.url()))); @@ -755,17 +753,16 @@ test!(dep_with_submodule { "#) }).unwrap(); let git_project2 = git_repo("dep2", |project| { - project - .file("lib.rs", "pub fn dep() {}") + project.file("lib.rs", "pub fn dep() {}") }).unwrap(); let repo = git2::Repository::open(&git_project.root()).unwrap(); let url = path2url(git_project2.root()).to_string(); - add_submodule(&repo, url.as_slice(), &Path::new("src")); + add_submodule(&repo, &url, Path::new("src")); commit(&repo); let project = project - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" @@ -782,7 +779,7 @@ test!(dep_with_submodule { "); assert_that(project.cargo_process("build"), - execs().with_stderr("").with_status(0)); + execs().with_stderr("").with_status(0)); }); test!(two_deps_only_update_one { @@ -809,7 +806,7 @@ test!(two_deps_only_update_one { }).unwrap(); let project = project - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" @@ -837,14 +834,14 @@ test!(two_deps_only_update_one { COMPILING, project.url())) .with_stderr("")); - File::create(&git1.root().join("src/lib.rs")).write_str(r#" + File::create(&git1.root().join("src/lib.rs")).unwrap().write_all(br#" pub fn foo() {} "#).unwrap(); let repo = git2::Repository::open(&git1.root()).unwrap(); add(&repo); commit(&repo); - assert_that(project.process(cargo_dir().join("cargo")).arg("update") + assert_that(project.cargo("update") .arg("-p").arg("dep1"), execs() .with_stdout(format!("{} git repository `{}`\n", @@ -866,7 +863,7 @@ test!(stale_cached_version { // Update the git database in the cache with the current state of the git // repo let foo = project("foo") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.0.0" @@ -882,11 +879,11 @@ test!(stale_cached_version { "#); assert_that(foo.cargo_process("build"), execs().with_status(0)); - assert_that(foo.process(foo.bin("foo")), execs().with_status(0)); + assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); // Update the repo, and simulate someone else updating the lockfile and then // us pulling it down. - File::create(&bar.root().join("src/lib.rs")).write_str(r#" + File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" pub fn bar() -> i32 { 1 + 0 } "#).unwrap(); let repo = git2::Repository::open(&bar.root()).unwrap(); @@ -897,7 +894,7 @@ test!(stale_cached_version { let rev = repo.revparse_single("HEAD").unwrap().id(); - File::create(&foo.root().join("Cargo.lock")).write_str(format!(r#" + File::create(&foo.root().join("Cargo.lock")).unwrap().write_all(format!(r#" [root] name = "foo" version = "0.0.0" @@ -909,17 +906,17 @@ test!(stale_cached_version { name = "bar" version = "0.0.0" source = 'git+{url}#{hash}' - "#, url = bar.url(), hash = rev).as_slice()).unwrap(); + "#, url = bar.url(), hash = rev).as_bytes()).unwrap(); // Now build! 
- assert_that(foo.process(cargo_dir().join("cargo")).arg("build"), + assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout(format!("\ {updating} git repository `{bar}` {compiling} bar v0.0.0 ({bar}#[..]) {compiling} foo v0.0.0 ({foo}) ", updating = UPDATING, compiling = COMPILING, bar = bar.url(), foo = foo.url()))); - assert_that(foo.process(foo.bin("foo")), execs().with_status(0)); + assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); }); test!(dep_with_changed_submodule { @@ -950,7 +947,7 @@ test!(dep_with_changed_submodule { commit(&repo); let project = project - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" @@ -978,9 +975,9 @@ test!(dep_with_changed_submodule { .with_stderr("") .with_status(0)); - let mut file = File::create(&git_project.root().join(".gitmodules")); - file.write_str(format!("[submodule \"src\"]\n\tpath = src\n\turl={}", - git_project3.url()).as_slice()).unwrap(); + File::create(&git_project.root().join(".gitmodules")).unwrap() + .write_all(format!("[submodule \"src\"]\n\tpath = src\n\turl={}", + git_project3.url()).as_bytes()).unwrap(); // Sync the submodule and reset it to the new remote. sub.sync().unwrap(); @@ -1002,7 +999,7 @@ test!(dep_with_changed_submodule { timer::sleep(Duration::milliseconds(1000)); // Update the dependency and carry on! println!("update"); - assert_that(project.process(cargo_dir().join("cargo")).arg("update").arg("-v"), + assert_that(project.cargo("update").arg("-v"), execs() .with_stderr("") .with_stdout(format!("{} git repository `{}`", @@ -1010,7 +1007,7 @@ test!(dep_with_changed_submodule { git_project.url()))); println!("last run"); - assert_that(project.process(cargo_dir().join("cargo")).arg("run"), execs() + assert_that(project.cargo("run"), execs() .with_stdout(format!("{compiling} dep1 v0.5.0 ([..])\n\ {compiling} foo v0.5.0 ([..])\n\ {running} `target[..]foo`\n\ @@ -1035,7 +1032,7 @@ test!(dev_deps_with_testing { }).unwrap(); let p = project("foo") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" @@ -1066,7 +1063,7 @@ test!(dev_deps_with_testing { // Make sure we use the previous resolution of `bar` instead of updating it // a second time. - assert_that(p.process(cargo_dir().join("cargo")).arg("test"), + assert_that(p.cargo("test"), execs().with_stdout(format!("\ {compiling} [..] v0.5.0 ([..]) {compiling} [..] v0.5.0 ([..] 
@@ -1099,7 +1096,7 @@ test!(git_build_cmd_freshness { timer::sleep(Duration::milliseconds(1000)); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build"), + assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout(format!("\ {compiling} foo v0.0.0 ({url}) @@ -1107,14 +1104,14 @@ test!(git_build_cmd_freshness { // Smoke test to make sure it doesn't compile again println!("first pass"); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build"), + assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout("")); // Modify an ignored file and make sure we don't rebuild println!("second pass"); File::create(&foo.root().join("src/bar.rs")).unwrap(); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build"), + assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout("")); }); @@ -1138,7 +1135,7 @@ test!(git_name_not_always_needed { let _ = cfg.remove("user.email"); let p = project("foo") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" @@ -1171,7 +1168,7 @@ test!(git_repo_changing_no_rebuild { // Lock p1 to the first rev in the git repo let p1 = project("p1") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "p1" version = "0.5.0" @@ -1184,7 +1181,7 @@ test!(git_repo_changing_no_rebuild { .file("build.rs", "fn main() {}"); p1.build(); p1.root().move_into_the_past().unwrap(); - assert_that(p1.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p1.cargo("build"), execs().with_stdout(format!("\ {updating} git repository `{bar}` {compiling} [..] @@ -1192,7 +1189,7 @@ test!(git_repo_changing_no_rebuild { ", updating = UPDATING, compiling = COMPILING, bar = bar.url()))); // Make a commit to lock p2 to a different rev - File::create(&bar.root().join("src/lib.rs")).write_str(r#" + File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" pub fn bar() -> i32 { 2 } "#).unwrap(); let repo = git2::Repository::open(&bar.root()).unwrap(); @@ -1201,7 +1198,7 @@ test!(git_repo_changing_no_rebuild { // Lock p2 to the second rev let p2 = project("p2") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "p2" version = "0.5.0" @@ -1219,7 +1216,7 @@ test!(git_repo_changing_no_rebuild { // And now for the real test! Make sure that p1 doesn't get rebuilt // even though the git repo has changed. - assert_that(p1.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p1.cargo("build"), execs().with_stdout("")); }); @@ -1269,24 +1266,21 @@ test!(git_dep_build_cmd { p.root().join("bar").move_into_the_past().unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), - execs().with_status(0)); + assert_that(p.cargo("build"), + execs().with_status(0)); - assert_that( - cargo::util::process(p.bin("foo")).unwrap(), - execs().with_stdout("0\n")); + assert_that(cargo::util::process(&p.bin("foo")).unwrap(), + execs().with_stdout("0\n")); // Touching bar.rs.in should cause the `build` command to run again. 
- let mut file = fs::File::create(&p.root().join("bar/src/bar.rs.in")).unwrap(); - file.write_str(r#"pub fn gimme() -> i32 { 1 }"#).unwrap(); - drop(file); + fs::File::create(&p.root().join("bar/src/bar.rs.in")).unwrap() + .write_all(b"pub fn gimme() -> i32 { 1 }").unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), - execs().with_status(0)); + assert_that(p.cargo("build"), + execs().with_status(0)); - assert_that( - cargo::util::process(p.bin("foo")).unwrap(), - execs().with_stdout("1\n")); + assert_that(cargo::util::process(&p.bin("foo")).unwrap(), + execs().with_stdout("1\n")); }); test!(fetch_downloads { @@ -1301,7 +1295,7 @@ test!(fetch_downloads { }).unwrap(); let p = project("p1") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "p1" version = "0.5.0" @@ -1315,7 +1309,7 @@ test!(fetch_downloads { {updating} git repository `{url}` ", updating = UPDATING, url = bar.url()))); - assert_that(p.process(cargo_dir().join("cargo")).arg("fetch"), + assert_that(p.cargo("fetch"), execs().with_status(0).with_stdout("")); }); @@ -1331,7 +1325,7 @@ test!(warnings_in_git_dep { }).unwrap(); let p = project("foo") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" @@ -1372,7 +1366,7 @@ test!(update_ambiguous { .file("src/lib.rs", "") }).unwrap(); let bar = git_repo("bar", |project| { - project.file("Cargo.toml", format!(r#" + project.file("Cargo.toml", &format!(r#" [package] name = "bar" version = "0.5.0" @@ -1385,7 +1379,7 @@ test!(update_ambiguous { }).unwrap(); let p = project("project") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "project" version = "0.5.0" @@ -1398,7 +1392,7 @@ test!(update_ambiguous { .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("generate-lockfile"), execs().with_status(0)); - assert_that(p.process(cargo_dir().join("cargo")).arg("update") + assert_that(p.cargo("update") .arg("-p").arg("foo"), execs().with_status(101) .with_stderr("\ @@ -1430,7 +1424,7 @@ test!(update_one_dep_in_repo_with_many_deps { }).unwrap(); let p = project("project") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "project" version = "0.5.0" @@ -1443,7 +1437,7 @@ test!(update_one_dep_in_repo_with_many_deps { .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("generate-lockfile"), execs().with_status(0)); - assert_that(p.process(cargo_dir().join("cargo")).arg("update") + assert_that(p.cargo("update") .arg("-p").arg("foo"), execs().with_status(0) .with_stdout(format!("\ @@ -1462,7 +1456,7 @@ test!(switch_deps_does_not_update_transitive { .file("src/lib.rs", "") }).unwrap(); let dep1 = git_repo("dep1", |project| { - project.file("Cargo.toml", format!(r#" + project.file("Cargo.toml", &format!(r#" [package] name = "dep" version = "0.5.0" @@ -1474,7 +1468,7 @@ test!(switch_deps_does_not_update_transitive { .file("src/lib.rs", "") }).unwrap(); let dep2 = git_repo("dep2", |project| { - project.file("Cargo.toml", format!(r#" + project.file("Cargo.toml", &format!(r#" [package] name = "dep" version = "0.5.0" @@ -1487,7 +1481,7 @@ test!(switch_deps_does_not_update_transitive { }).unwrap(); let p = project("project") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "project" version = "0.5.0" @@ -1498,7 +1492,7 @@ test!(switch_deps_does_not_update_transitive { .file("src/main.rs", "fn main() {}"); p.build(); - 
assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(format!("\ Updating git repository `{}` @@ -1510,16 +1504,16 @@ Updating git repository `{}` // Update the dependency to point to the second repository, but this // shouldn't update the transitive dependency which is the same. - File::create(&p.root().join("Cargo.toml")).write_str(format!(r#" + File::create(&p.root().join("Cargo.toml")).unwrap().write_all(format!(r#" [project] name = "project" version = "0.5.0" authors = [] [dependencies.dep] git = '{}' - "#, dep2.url()).as_slice()).unwrap(); + "#, dep2.url()).as_bytes()).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(format!("\ Updating git repository `{}` @@ -1550,7 +1544,7 @@ test!(update_one_source_updates_all_packages_in_that_git_source { }).unwrap(); let p = project("project") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "project" version = "0.5.0" @@ -1561,25 +1555,25 @@ test!(update_one_source_updates_all_packages_in_that_git_source { .file("src/main.rs", "fn main() {}"); p.build(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0)); let repo = git2::Repository::open(&dep.root()).unwrap(); let rev1 = repo.revparse_single("HEAD").unwrap().id(); // Just be sure to change a file - File::create(&dep.root().join("src/lib.rs")).write_str(r#" + File::create(&dep.root().join("src/lib.rs")).unwrap().write_all(br#" pub fn bar() -> i32 { 2 } "#).unwrap(); add(&repo); commit(&repo); - assert_that(p.process(cargo_dir().join("cargo")).arg("update") - .arg("-p").arg("dep"), + assert_that(p.cargo("update").arg("-p").arg("dep"), execs().with_status(0)); - let lockfile = File::open(&p.root().join("Cargo.lock")).read_to_string() - .unwrap(); - assert!(!lockfile.as_slice().contains(rev1.to_string().as_slice()), + let mut lockfile = String::new(); + File::open(&p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lockfile).unwrap(); + assert!(!lockfile.contains(rev1.to_string().as_slice()), "{} in {}", rev1, lockfile); }); @@ -1613,7 +1607,7 @@ test!(switch_sources { path = "b" "#) .file("src/main.rs", "fn main() {}") - .file("b/Cargo.toml", format!(r#" + .file("b/Cargo.toml", &format!(r#" [project] name = "b" version = "0.5.0" @@ -1624,7 +1618,7 @@ test!(switch_sources { .file("b/src/lib.rs", "fn main() {}"); p.build(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(format!("\ {updating} git repository `file://[..]a1` @@ -1633,16 +1627,16 @@ test!(switch_sources { {compiling} project v0.5.0 ([..]) ", updating = UPDATING, compiling = COMPILING).as_slice())); - File::create(&p.root().join("b/Cargo.toml")).write_str(format!(r#" + File::create(&p.root().join("b/Cargo.toml")).unwrap().write_all(format!(r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies.a] git = '{}' - "#, a2.url()).as_slice()).unwrap(); + "#, a2.url()).as_bytes()).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(format!("\ {updating} git repository `file://[..]a2` @@ -1669,7 +1663,7 @@ test!(dont_require_submodules_are_checked_out { let repo = git2::Repository::open(&git1.root()).unwrap(); let url = path2url(git2.root()).to_string(); - 
add_submodule(&repo, url.as_slice(), &Path::new("submodule")); + add_submodule(&repo, &url, &Path::new("submodule")); commit(&repo); git2::Repository::init(&project.root()).unwrap(); @@ -1677,7 +1671,6 @@ test!(dont_require_submodules_are_checked_out { let dst = paths::home().join("foo"); git2::Repository::clone(&url, &dst).unwrap(); - assert_that(git1.process(cargo_dir().join("cargo")).arg("build").arg("-v") - .cwd(dst), + assert_that(git1.cargo("build").arg("-v").cwd(&dst), execs().with_status(0)); }); diff --git a/tests/test_cargo_compile_path_deps.rs b/tests/test_cargo_compile_path_deps.rs index 9286d7ed2..b2dfb2ab2 100644 --- a/tests/test_cargo_compile_path_deps.rs +++ b/tests/test_cargo_compile_path_deps.rs @@ -1,9 +1,11 @@ -use std::old_io::{fs, File, USER_RWX, timer}; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::old_io::timer; use std::time::Duration; -use support::{project, execs, main_file, cargo_dir}; +use support::{project, execs, main_file}; use support::{COMPILING, RUNNING}; -use support::paths::{self, PathExt}; +use support::paths::{self, CargoPathExt}; use hamcrest::{assert_that, existing_file}; use cargo; use cargo::util::{process}; @@ -82,21 +84,19 @@ test!(cargo_compile_with_nested_deps_shorthand { assert_that(&p.bin("foo"), existing_file()); - assert_that( - cargo::util::process(p.bin("foo")).unwrap(), - execs().with_stdout("test passed\n").with_status(0)); + assert_that(cargo::util::process(&p.bin("foo")).unwrap(), + execs().with_stdout("test passed\n").with_status(0)); println!("cleaning"); - assert_that(p.process(cargo_dir().join("cargo")).arg("clean"), + assert_that(p.cargo("clean"), execs().with_stdout("").with_status(0)); println!("building baz"); - assert_that(p.process(cargo_dir().join("cargo")).arg("build") - .arg("-p").arg("baz"), + assert_that(p.cargo("build").arg("-p").arg("baz"), execs().with_status(0) .with_stdout(format!("{} baz v0.5.0 ({})\n", COMPILING, p.url()))); println!("building foo"); - assert_that(p.process(cargo_dir().join("cargo")).arg("build") + assert_that(p.cargo("build") .arg("-p").arg("foo"), execs().with_status(0) .with_stdout(format!("{} bar v0.5.0 ({})\n\ @@ -239,9 +239,8 @@ test!(cargo_compile_with_transitive_dev_deps { assert_that(&p.bin("foo"), existing_file()); - assert_that( - cargo::util::process(p.bin("foo")).unwrap(), - execs().with_stdout("zoidberg\n")); + assert_that(cargo::util::process(&p.bin("foo")).unwrap(), + execs().with_stdout("zoidberg\n")); }); test!(no_rebuild_dependency { @@ -280,12 +279,12 @@ test!(no_rebuild_dependency { COMPILING, p.url(), COMPILING, p.url()))); // This time we shouldn't compile bar - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout("")); p.root().move_into_the_past().unwrap(); p.build(); // rebuild the files (rewriting them in the process) - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout(format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), @@ -347,7 +346,7 @@ test!(deep_dependencies_trigger_rebuild { COMPILING, p.url(), COMPILING, p.url(), COMPILING, p.url()))); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout("")); // Make sure an update to baz triggers a rebuild of bar @@ -355,10 +354,10 @@ test!(deep_dependencies_trigger_rebuild { // We base recompilation off mtime, so sleep for at least a second to ensure // that this write will change the mtime. 
timer::sleep(Duration::seconds(1)); - File::create(&p.root().join("baz/src/baz.rs")).write_str(r#" + File::create(&p.root().join("baz/src/baz.rs")).unwrap().write_all(br#" pub fn baz() { println!("hello!"); } "#).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout(format!("{} baz v0.5.0 ({})\n\ {} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", @@ -368,11 +367,11 @@ test!(deep_dependencies_trigger_rebuild { // Make sure an update to bar doesn't trigger baz timer::sleep(Duration::seconds(1)); - File::create(&p.root().join("bar/src/bar.rs")).write_str(r#" + File::create(&p.root().join("bar/src/bar.rs")).unwrap().write_all(br#" extern crate baz; pub fn bar() { println!("hello!"); baz::baz(); } "#).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout(format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), @@ -437,7 +436,7 @@ test!(no_rebuild_two_deps { COMPILING, p.url(), COMPILING, p.url()))); assert_that(&p.bin("foo"), existing_file()); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout("")); assert_that(&p.bin("foo"), existing_file()); }); @@ -483,12 +482,12 @@ test!(nested_deps_recompile { COMPILING, p.url()))); p.root().move_into_the_past().unwrap(); - File::create(&p.root().join("src/foo.rs")).write_str(r#" + File::create(&p.root().join("src/foo.rs")).unwrap().write_all(br#" fn main() {} "#).unwrap(); // This shouldn't recompile `bar` - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout(format!("{} foo v0.5.0 ({})\n", COMPILING, p.url()))); }); @@ -515,8 +514,8 @@ test!(error_message_for_missing_manifest { assert_that(p.cargo_process("build"), execs() .with_status(101) - .with_stderr(format!("Could not find `Cargo.toml` in `{}`\n", - p.root().join_many(&["src", "bar"]).display()))); + .with_stderr(&format!("Could not find `Cargo.toml` in `{}`\n", + p.root().join("src").join("bar").display()))); }); @@ -531,13 +530,12 @@ test!(override_relative { "#) .file("src/lib.rs", ""); - fs::mkdir(&paths::root().join(".cargo"), USER_RWX).unwrap(); - File::create(&paths::root().join(".cargo/config")).write_str(r#" - paths = ["bar"] - "#).unwrap(); + fs::create_dir(&paths::root().join(".cargo")).unwrap(); + File::create(&paths::root().join(".cargo/config")).unwrap() + .write_all(br#"paths = ["bar"]"#).unwrap(); let p = project("foo") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [package] name = "foo" @@ -567,10 +565,10 @@ test!(override_self { let p = project("foo"); let root = p.root().clone(); let p = p - .file(".cargo/config", format!(r#" + .file(".cargo/config", &format!(r#" paths = ['{}'] "#, root.display())) - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [package] name = "foo" @@ -610,11 +608,11 @@ test!(override_path_dep { .file("p2/src/lib.rs", ""); let p = project("foo") - .file(".cargo/config", format!(r#" + .file(".cargo/config", &format!(r#" paths = ['{}', '{}'] "#, bar.root().join("p1").display(), bar.root().join("p2").display())) - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [package] name = "foo" @@ -666,10 +664,9 @@ test!(path_dep_build_cmd { name = "bar" "#) .file("bar/build.rs", r#" - use std::old_io::fs; + use std::fs; fn main() { - fs::copy(&Path::new("src/bar.rs.in"), - &Path::new("src/bar.rs")).unwrap(); + 
fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); } "#) .file("bar/src/bar.rs.in", r#" @@ -679,7 +676,7 @@ test!(path_dep_build_cmd { p.build(); p.root().join("bar").move_into_the_past().unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout(format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), @@ -687,25 +684,23 @@ test!(path_dep_build_cmd { assert_that(&p.bin("foo"), existing_file()); - assert_that( - cargo::util::process(p.bin("foo")).unwrap(), - execs().with_stdout("0\n")); + assert_that(cargo::util::process(&p.bin("foo")).unwrap(), + execs().with_stdout("0\n")); // Touching bar.rs.in should cause the `build` command to run again. { - let mut file = fs::File::create(&p.root().join("bar/src/bar.rs.in")).unwrap(); - file.write_str(r#"pub fn gimme() -> int { 1 }"#).unwrap(); + let file = fs::File::create(&p.root().join("bar/src/bar.rs.in")); + file.unwrap().write_all(br#"pub fn gimme() -> int { 1 }"#).unwrap(); } - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_stdout(format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); - assert_that( - cargo::util::process(p.bin("foo")).unwrap(), - execs().with_stdout("1\n")); + assert_that(cargo::util::process(&p.bin("foo")).unwrap(), + execs().with_stdout("1\n")); }); test!(dev_deps_no_rebuild_lib { @@ -736,13 +731,13 @@ test!(dev_deps_no_rebuild_lib { "#) .file("bar/src/lib.rs", "pub fn bar() {}"); p.build(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build") - .env("FOO", Some("bar")), + assert_that(p.cargo("build") + .env("FOO", "bar"), execs().with_status(0) .with_stdout(format!("{} foo v0.5.0 ({})\n", COMPILING, p.url()))); - assert_that(p.process(cargo_dir().join("cargo")).arg("test"), + assert_that(p.cargo("test"), execs().with_status(0) .with_stdout(format!("\ {} [..] v0.5.0 ({}) diff --git a/tests/test_cargo_compile_plugins.rs b/tests/test_cargo_compile_plugins.rs index ca44a4dbf..0b093756c 100644 --- a/tests/test_cargo_compile_plugins.rs +++ b/tests/test_cargo_compile_plugins.rs @@ -1,7 +1,7 @@ -use std::old_io::fs; +use std::fs; use std::env; -use support::{project, execs, cargo_dir}; +use support::{project, execs}; use hamcrest::assert_that; fn setup() { @@ -78,7 +78,7 @@ test!(plugin_to_the_max { assert_that(foo.cargo_process("build"), execs().with_status(0)); - assert_that(foo.process(cargo_dir().join("cargo")).arg("doc"), + assert_that(foo.cargo("doc"), execs().with_status(0)); }); @@ -101,12 +101,12 @@ test!(plugin_with_dynamic_native_dependency { assert_that(build.cargo_process("build"), execs().with_status(0).with_stderr("")); let src = build.root().join("target"); - let lib = fs::readdir(&src).unwrap().into_iter().find(|lib| { - let lib = lib.filename_str().unwrap(); + let lib = fs::read_dir(&src).unwrap().map(|s| s.unwrap().path()).find(|lib| { + let lib = lib.file_name().unwrap().to_str().unwrap(); lib.starts_with(env::consts::DLL_PREFIX) && lib.ends_with(env::consts::DLL_SUFFIX) }).unwrap(); - let libname = lib.filename_str().unwrap(); + let libname = lib.file_name().unwrap().to_str().unwrap(); let libname = &libname[env::consts::DLL_PREFIX.len().. 
libname.len() - env::consts::DLL_SUFFIX.len()]; @@ -146,7 +146,7 @@ test!(plugin_with_dynamic_native_dependency { println!("cargo:rustc-flags=-L {}", src.dir_path().display()); } "#) - .file("bar/src/lib.rs", format!(r#" + .file("bar/src/lib.rs", &format!(r#" #![feature(plugin_registrar)] extern crate rustc; @@ -162,7 +162,7 @@ test!(plugin_with_dynamic_native_dependency { }} "#, libname)); - assert_that(foo.cargo_process("build").env("SRC", Some(lib.as_vec())), + assert_that(foo.cargo_process("build").env("SRC", &lib), execs().with_status(0)); }); diff --git a/tests/test_cargo_cross_compile.rs b/tests/test_cargo_cross_compile.rs index cf6b9a586..915f14b60 100644 --- a/tests/test_cargo_cross_compile.rs +++ b/tests/test_cargo_cross_compile.rs @@ -1,8 +1,7 @@ use std::env; -use std::old_path; use support::{project, execs, basic_bin_manifest}; -use support::{RUNNING, COMPILING, DOCTEST, cargo_dir}; +use support::{RUNNING, COMPILING, DOCTEST}; use hamcrest::{assert_that, existing_file}; use cargo::util::process; use cargo::ops::rustc_version; @@ -59,9 +58,8 @@ test!(simple_cross { execs().with_status(0)); assert_that(&p.target_bin(target, "foo"), existing_file()); - assert_that( - process(p.target_bin(target, "foo")).unwrap(), - execs().with_status(0)); + assert_that(process(&p.target_bin(target, "foo")).unwrap(), + execs().with_status(0)); }); test!(simple_deps { @@ -96,9 +94,8 @@ test!(simple_deps { execs().with_status(0)); assert_that(&p.target_bin(target, "foo"), existing_file()); - assert_that( - process(p.target_bin(target, "foo")).unwrap(), - execs().with_status(0)); + assert_that(process(&p.target_bin(target, "foo")).unwrap(), + execs().with_status(0)); }); test!(plugin_deps { @@ -173,9 +170,8 @@ test!(plugin_deps { execs().with_status(0)); assert_that(&foo.target_bin(target, "foo"), existing_file()); - assert_that( - process(foo.target_bin(target, "foo")).unwrap(), - execs().with_status(0)); + assert_that(process(&foo.target_bin(target, "foo")).unwrap(), + execs().with_status(0)); }); test!(plugin_to_the_max { @@ -253,14 +249,13 @@ test!(plugin_to_the_max { assert_that(foo.cargo_process("build").arg("--target").arg(target).arg("-v"), execs().with_status(0)); println!("second"); - assert_that(foo.process(cargo_dir().join("cargo")).arg("build").arg("-v") + assert_that(foo.cargo("build").arg("-v") .arg("--target").arg(target), execs().with_status(0)); assert_that(&foo.target_bin(target, "foo"), existing_file()); - assert_that( - process(foo.target_bin(target, "foo")).unwrap(), - execs().with_status(0)); + assert_that(process(&foo.target_bin(target, "foo")).unwrap(), + execs().with_status(0)); }); test!(linker_and_ar { @@ -287,19 +282,18 @@ test!(linker_and_ar { .with_stdout(format!("\ {compiling} foo v0.5.0 ({url}) {running} `rustc src/foo.rs --crate-name foo --crate-type bin -g \ - --out-dir {dir}{sep}target{sep}{target} \ + --out-dir {dir}[..]target[..]{target} \ --emit=dep-info,link \ --target {target} \ -C ar=my-ar-tool -C linker=my-linker-tool \ - -L dependency={dir}{sep}target{sep}{target} \ - -L dependency={dir}{sep}target{sep}{target}{sep}deps` + -L dependency={dir}[..]target[..]{target} \ + -L dependency={dir}[..]target[..]{target}[..]deps` ", running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), target = target, - sep = old_path::SEP, ).as_slice())); }); @@ -481,11 +475,11 @@ test!(cross_with_a_build_script { execs().with_status(0) .with_stdout(format!("\ {compiling} foo v0.0.0 (file://[..]) -{running} `rustc build.rs [..] 
--out-dir {dir}{sep}target{sep}build{sep}foo-[..]` -{running} `{dir}{sep}target{sep}build{sep}foo-[..]build-script-build` -{running} `rustc src{sep}main.rs [..] --target {target} [..]` +{running} `rustc build.rs [..] --out-dir {dir}[..]target[..]build[..]foo-[..]` +{running} `{dir}[..]target[..]build[..]foo-[..]build-script-build` +{running} `rustc src[..]main.rs [..] --target {target} [..]` ", compiling = COMPILING, running = RUNNING, target = target, - dir = p.root().display(), sep = old_path::SEP).as_slice())); + dir = p.root().display()).as_slice())); }); test!(build_script_needed_for_host_and_target { @@ -550,24 +544,24 @@ test!(build_script_needed_for_host_and_target { execs().with_status(0) .with_stdout(format!("\ {compiling} d1 v0.0.0 (file://{dir}) -{running} `rustc d1{sep}build.rs [..] --out-dir {dir}{sep}target{sep}build{sep}d1-[..]` -{running} `{dir}{sep}target{sep}build{sep}d1-[..]build-script-build` -{running} `{dir}{sep}target{sep}build{sep}d1-[..]build-script-build` -{running} `rustc d1{sep}src{sep}lib.rs [..] --target {target} [..] \ +{running} `rustc d1[..]build.rs [..] --out-dir {dir}[..]target[..]build[..]d1-[..]` +{running} `{dir}[..]target[..]build[..]d1-[..]build-script-build` +{running} `{dir}[..]target[..]build[..]d1-[..]build-script-build` +{running} `rustc d1[..]src[..]lib.rs [..] --target {target} [..] \ -L /path/to/{target}` -{running} `rustc d1{sep}src{sep}lib.rs [..] \ +{running} `rustc d1[..]src[..]lib.rs [..] \ -L /path/to/{host}` {compiling} d2 v0.0.0 (file://{dir}) -{running} `rustc d2{sep}src{sep}lib.rs [..] \ +{running} `rustc d2[..]src[..]lib.rs [..] \ -L /path/to/{host}` {compiling} foo v0.0.0 (file://{dir}) -{running} `rustc build.rs [..] --out-dir {dir}{sep}target{sep}build{sep}foo-[..] \ +{running} `rustc build.rs [..] --out-dir {dir}[..]target[..]build[..]foo-[..] \ -L /path/to/{host}` -{running} `{dir}{sep}target{sep}build{sep}foo-[..]build-script-build` -{running} `rustc src{sep}main.rs [..] --target {target} [..] \ +{running} `{dir}[..]target[..]build[..]foo-[..]build-script-build` +{running} `rustc src[..]main.rs [..] --target {target} [..] 
\ -L /path/to/{target}` ", compiling = COMPILING, running = RUNNING, target = target, host = host, - dir = p.root().display(), sep = old_path::SEP).as_slice())); + dir = p.root().display()).as_slice())); }); test!(build_deps_for_the_right_arch { diff --git a/tests/test_cargo_doc.rs b/tests/test_cargo_doc.rs index 3612b0312..27fb250e5 100644 --- a/tests/test_cargo_doc.rs +++ b/tests/test_cargo_doc.rs @@ -1,4 +1,4 @@ -use support::{project, execs, cargo_dir, path2url}; +use support::{project, execs, path2url}; use support::COMPILING; use hamcrest::{assert_that, existing_file, existing_dir, is_not}; @@ -68,7 +68,7 @@ test!(doc_twice { compiling = COMPILING, dir = path2url(p.root())).as_slice())); - assert_that(p.process(cargo_dir().join("cargo")).arg("doc"), + assert_that(p.cargo("doc"), execs().with_status(0).with_stdout("")) }); @@ -109,8 +109,8 @@ test!(doc_deps { assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); - assert_that(p.process(cargo_dir().join("cargo")).arg("doc") - .env("RUST_LOG", Some("cargo::ops::cargo_rustc::fingerprint")), + assert_that(p.cargo("doc") + .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"), execs().with_status(0).with_stdout("")); assert_that(&p.root().join("target/doc"), existing_dir()); diff --git a/tests/test_cargo_features.rs b/tests/test_cargo_features.rs index 9f39f22e6..05069a31e 100644 --- a/tests/test_cargo_features.rs +++ b/tests/test_cargo_features.rs @@ -1,8 +1,9 @@ -use std::old_io::File; +use std::fs::File; +use std::io::prelude::*; -use support::{project, execs, cargo_dir}; +use support::{project, execs}; use support::{COMPILING, FRESH}; -use support::paths::PathExt; +use support::paths::CargoPathExt; use hamcrest::assert_that; fn setup() { @@ -247,15 +248,15 @@ test!(no_feature_doesnt_build { execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()).as_slice())); - assert_that(p.process(p.bin("foo")), execs().with_status(0).with_stdout("")); + assert_that(p.process(&p.bin("foo")), + execs().with_status(0).with_stdout("")); - assert_that(p.process(cargo_dir().join("cargo")).arg("build") - .arg("--features").arg("bar"), + assert_that(p.cargo("build").arg("--features").arg("bar"), execs().with_status(0).with_stdout(format!("\ {compiling} bar v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()).as_slice())); - assert_that(p.process(p.bin("foo")), + assert_that(p.process(&p.bin("foo")), execs().with_status(0).with_stdout("bar\n")); }); @@ -295,15 +296,15 @@ test!(default_feature_pulled_in { {compiling} bar v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()).as_slice())); - assert_that(p.process(p.bin("foo")), + assert_that(p.process(&p.bin("foo")), execs().with_status(0).with_stdout("bar\n")); - assert_that(p.process(cargo_dir().join("cargo")).arg("build") - .arg("--no-default-features"), + assert_that(p.cargo("build").arg("--no-default-features"), execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()).as_slice())); - assert_that(p.process(p.bin("foo")), execs().with_status(0).with_stdout("")); + assert_that(p.process(&p.bin("foo")), + execs().with_status(0).with_stdout("")); }); test!(cyclic_feature { @@ -537,7 +538,7 @@ test!(many_features_no_rebuilds { ", compiling = COMPILING, dir = p.url()).as_slice())); p.root().move_into_the_past().unwrap(); - 
assert_that(p.process(cargo_dir().join("cargo")).arg("build").arg("-v"), + assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(format!("\ {fresh} a v0.1.0 ([..]) {fresh} b v0.1.0 ([..]) @@ -647,8 +648,9 @@ test!(everything_in_the_lockfile { .file("d3/src/lib.rs", ""); assert_that(p.cargo_process("fetch"), execs().with_status(0)); - let lockfile = p.root().join("Cargo.lock"); - let lockfile = File::open(&lockfile).read_to_string().unwrap(); + let loc = p.root().join("Cargo.lock"); + let mut lockfile = String::new(); + File::open(&loc).unwrap().read_to_string(&mut lockfile).unwrap(); assert!(lockfile.contains(r#"name = "d1""#), "d1 not found\n{}", lockfile); assert!(lockfile.contains(r#"name = "d2""#), "d2 not found\n{}", lockfile); assert!(lockfile.contains(r#"name = "d3""#), "d3 not found\n{}", lockfile); diff --git a/tests/test_cargo_freshness.rs b/tests/test_cargo_freshness.rs index a4d302a86..394541be3 100644 --- a/tests/test_cargo_freshness.rs +++ b/tests/test_cargo_freshness.rs @@ -1,8 +1,9 @@ -use std::old_io::{fs, File}; +use std::fs::{self, File}; +use std::io::prelude::*; use support::{project, execs, path2url}; -use support::{COMPILING, cargo_dir}; -use support::paths::PathExt; +use support::COMPILING; +use support::paths::CargoPathExt; use hamcrest::{assert_that, existing_file}; fn setup() {} @@ -25,19 +26,20 @@ test!(modifying_and_moving { {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = path2url(p.root())))); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); p.root().move_into_the_past().unwrap(); p.root().join("target").move_into_the_past().unwrap(); - File::create(&p.root().join("src/a.rs")).write_str("fn main() {}").unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + File::create(&p.root().join("src/a.rs")).unwrap() + .write_all(b"fn main() {}").unwrap(); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = path2url(p.root())))); fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(101)); }); @@ -62,7 +64,7 @@ test!(modify_only_some_files { execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = path2url(p.root())))); - assert_that(p.process(cargo_dir().join("cargo")).arg("test"), + assert_that(p.cargo("test"), execs().with_status(0)); assert_that(&p.bin("foo"), existing_file()); @@ -70,15 +72,15 @@ test!(modify_only_some_files { let lib = p.root().join("src/lib.rs"); let bin = p.root().join("src/b.rs"); - File::create(&lib).write_str("invalid rust code").unwrap(); + File::create(&lib).unwrap().write_all(b"invalid rust code").unwrap(); lib.move_into_the_past().unwrap(); p.root().move_into_the_past().unwrap(); - File::create(&bin).write_str("fn foo() {}").unwrap(); + File::create(&bin).unwrap().write_all(b"fn foo() {}").unwrap(); // Make sure the binary is rebuilt, not the lib - assert_that(p.process(cargo_dir().join("cargo")).arg("build") - .env("RUST_LOG", Some("cargo::ops::cargo_rustc::fingerprint")), + assert_that(p.cargo("build") + .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"), execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = path2url(p.root())))); @@ -119,20 +121,20 @@ 
test!(rebuild_sub_package_then_while_package { assert_that(p.cargo_process("build"), execs().with_status(0)); - File::create(&p.root().join("b/src/lib.rs")).unwrap().write_str(r#" + File::create(&p.root().join("b/src/lib.rs")).unwrap().write_all(br#" pub fn b() {} "#).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build").arg("-pb"), + assert_that(p.cargo("build").arg("-pb"), execs().with_status(0)); - File::create(&p.root().join("src/lib.rs")).unwrap().write_str(r#" + File::create(&p.root().join("src/lib.rs")).unwrap().write_all(br#" extern crate a; extern crate b; pub fn toplevel() {} "#).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0)); }); @@ -155,20 +157,19 @@ test!(changing_features_is_ok { [..]Compiling foo v0.0.1 ([..]) ")); - assert_that(p.process(cargo_dir().join("cargo")).arg("build") - .arg("--features").arg("foo"), + assert_that(p.cargo("build").arg("--features").arg("foo"), execs().with_status(0) .with_stdout("\ [..]Compiling foo v0.0.1 ([..]) ")); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0) .with_stdout("\ [..]Compiling foo v0.0.1 ([..]) ")); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0) .with_stdout("")); }); diff --git a/tests/test_cargo_generate_lockfile.rs b/tests/test_cargo_generate_lockfile.rs index a9c1c420f..ddfa48ea7 100644 --- a/tests/test_cargo_generate_lockfile.rs +++ b/tests/test_cargo_generate_lockfile.rs @@ -1,6 +1,7 @@ -use std::old_io::File; +use std::fs::File; +use std::io::prelude::*; -use support::{project, execs, cargo_dir}; +use support::{project, execs}; use hamcrest::assert_that; fn setup() {} @@ -22,11 +23,11 @@ test!(ignores_carriage_return { execs().with_status(0)); let lockfile = p.root().join("Cargo.lock"); - let lock = File::open(&lockfile).read_to_string(); - let lock = lock.unwrap(); - let lock = lock.as_slice().replace("\n", "\r\n"); - File::create(&lockfile).write_str(lock.as_slice()).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + let mut lock = String::new(); + File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); + let lock = lock.replace("\n", "\r\n"); + File::create(&lockfile).unwrap().write_all(lock.as_bytes()).unwrap(); + assert_that(p.cargo("build"), execs().with_status(0)); }); @@ -52,10 +53,11 @@ test!(adding_and_removing_packages { let lockfile = p.root().join("Cargo.lock"); let toml = p.root().join("Cargo.toml"); - let lock1 = File::open(&lockfile).read_to_string().unwrap(); + let mut lock1 = String::new(); + File::open(&lockfile).unwrap().read_to_string(&mut lock1).unwrap(); // add a dep - File::create(&toml).write_str(r#" + File::create(&toml).unwrap().write_all(br#" [package] name = "foo" authors = [] @@ -64,34 +66,37 @@ test!(adding_and_removing_packages { [dependencies.bar] path = "bar" "#).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("generate-lockfile"), + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); - let lock2 = File::open(&lockfile).read_to_string().unwrap(); + let mut lock2 = String::new(); + File::open(&lockfile).unwrap().read_to_string(&mut lock2).unwrap(); assert!(lock1 != lock2); // change the dep - File::create(&p.root().join("bar/Cargo.toml")).write_str(r#" + File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#" [package] name = "bar" authors = [] version = "0.0.2" 
"#).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("generate-lockfile"), + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); - let lock3 = File::open(&lockfile).read_to_string().unwrap(); + let mut lock3 = String::new(); + File::open(&lockfile).unwrap().read_to_string(&mut lock3).unwrap(); assert!(lock1 != lock3); assert!(lock2 != lock3); // remove the dep - File::create(&toml).write_str(r#" + File::create(&toml).unwrap().write_all(br#" [package] name = "foo" authors = [] version = "0.0.1" "#).unwrap(); - assert_that(p.process(cargo_dir().join("cargo")).arg("generate-lockfile"), + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); - let lock4 = File::open(&lockfile).read_to_string().unwrap(); + let mut lock4 = String::new(); + File::open(&lockfile).unwrap().read_to_string(&mut lock4).unwrap(); assert_eq!(lock1, lock4); }); @@ -122,19 +127,23 @@ foo = "bar" "#; let lockfile = p.root().join("Cargo.lock"); { - let lock = File::open(&lockfile).read_to_string().unwrap(); - File::create(&lockfile).write_str((lock + metadata).as_slice()).unwrap(); + let mut lock = String::new(); + File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); + let data = lock + metadata; + File::create(&lockfile).unwrap().write_all(data.as_bytes()).unwrap(); } // Build and make sure the metadata is still there - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0)); - let lock = File::open(&lockfile).read_to_string().unwrap(); - assert!(lock.as_slice().contains(metadata.trim()), "{}", lock); + let mut lock = String::new(); + File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); + assert!(lock.contains(metadata.trim()), "{}", lock); // Update and make sure the metadata is still there - assert_that(p.process(cargo_dir().join("cargo")).arg("update"), + assert_that(p.cargo("update"), execs().with_status(0)); - let lock = File::open(&lockfile).read_to_string().unwrap(); - assert!(lock.as_slice().contains(metadata.trim()), "{}", lock); + let mut lock = String::new(); + File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); + assert!(lock.contains(metadata.trim()), "{}", lock); }); diff --git a/tests/test_cargo_new.rs b/tests/test_cargo_new.rs index 760c5d89f..6821333c0 100644 --- a/tests/test_cargo_new.rs +++ b/tests/test_cargo_new.rs @@ -1,5 +1,5 @@ -use std::old_io::{fs, USER_RWX, File, TempDir}; -use std::old_io::fs::PathExtensions; +use std::fs::{self, File, TempDir}; +use std::io::prelude::*; use std::env; use support::{execs, paths, cargo_dir}; @@ -11,20 +11,20 @@ fn setup() { } fn my_process(s: &str) -> ProcessBuilder { - process(s).unwrap() - .cwd(paths::root()) - .env("HOME", Some(paths::home())) + let mut p = process(s).unwrap(); + p.cwd(&paths::root()).env("HOME", &paths::home()); + return p; } fn cargo_process(s: &str) -> ProcessBuilder { - process(cargo_dir().join("cargo")).unwrap().arg(s) - .cwd(paths::root()) - .env("HOME", Some(paths::home())) + let mut p = process(&cargo_dir().join("cargo")).unwrap(); + p.arg(s).cwd(&paths::root()).env("HOME", &paths::home()); + return p; } test!(simple_lib { assert_that(cargo_process("new").arg("foo").arg("--vcs").arg("none") - .env("USER", Some("foo")), + .env("USER", "foo"), execs().with_status(0)); assert_that(&paths::root().join("foo"), existing_dir()); @@ -32,30 +32,30 @@ test!(simple_lib { assert_that(&paths::root().join("foo/src/lib.rs"), existing_file()); assert_that(&paths::root().join("foo/.gitignore"), 
is_not(existing_file())); - assert_that(cargo_process("build").cwd(paths::root().join("foo")), + assert_that(cargo_process("build").cwd(&paths::root().join("foo")), execs().with_status(0)); }); test!(simple_bin { assert_that(cargo_process("new").arg("foo").arg("--bin") - .env("USER", Some("foo")), + .env("USER", "foo"), execs().with_status(0)); assert_that(&paths::root().join("foo"), existing_dir()); assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); assert_that(&paths::root().join("foo/src/main.rs"), existing_file()); - assert_that(cargo_process("build").cwd(paths::root().join("foo")), + assert_that(cargo_process("build").cwd(&paths::root().join("foo")), execs().with_status(0)); - assert_that(&paths::root().join(format!("foo/target/foo{}", - env::consts::EXE_SUFFIX)), + assert_that(&paths::root().join(&format!("foo/target/foo{}", + env::consts::EXE_SUFFIX)), existing_file()); }); test!(simple_git { let td = TempDir::new("cargo").unwrap(); assert_that(cargo_process("new").arg("foo").cwd(td.path().clone()) - .env("USER", Some("foo")), + .env("USER", "foo"), execs().with_status(0)); assert_that(td.path(), existing_dir()); @@ -64,7 +64,7 @@ test!(simple_git { assert_that(&td.path().join("foo/.git"), existing_dir()); assert_that(&td.path().join("foo/.gitignore"), existing_file()); - assert_that(cargo_process("build").cwd(td.path().clone().join("foo")), + assert_that(cargo_process("build").cwd(&td.path().clone().join("foo")), execs().with_status(0)); }); @@ -82,7 +82,7 @@ Usage: test!(existing { let dst = paths::root().join("foo"); - fs::mkdir(&dst, USER_RWX).unwrap(); + fs::create_dir(&dst).unwrap(); assert_that(cargo_process("new").arg("foo"), execs().with_status(101) .with_stderr(format!("Destination `{}` already exists\n", @@ -99,13 +99,14 @@ test!(finds_author_user { // Use a temp dir to make sure we don't pick up .cargo/config somewhere in // the hierarchy let td = TempDir::new("cargo").unwrap(); - assert_that(cargo_process("new").arg("foo").env("USER", Some("foo")) + assert_that(cargo_process("new").arg("foo").env("USER", "foo") .cwd(td.path().clone()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); - let toml = File::open(&toml).read_to_string().unwrap(); - assert!(toml.as_slice().contains(r#"authors = ["foo"]"#)); + let mut contents = String::new(); + File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); + assert!(contents.contains(r#"authors = ["foo"]"#)); }); test!(finds_author_username { @@ -113,14 +114,15 @@ test!(finds_author_username { // the hierarchy let td = TempDir::new("cargo").unwrap(); assert_that(cargo_process("new").arg("foo") - .env("USER", None::<&str>) - .env("USERNAME", Some("foo")) + .env_remove("USER") + .env("USERNAME", "foo") .cwd(td.path().clone()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); - let toml = File::open(&toml).read_to_string().unwrap(); - assert!(toml.as_slice().contains(r#"authors = ["foo"]"#)); + let mut contents = String::new(); + File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); + assert!(contents.contains(r#"authors = ["foo"]"#)); }); test!(finds_author_git { @@ -128,12 +130,13 @@ test!(finds_author_git { .exec().unwrap(); my_process("git").args(&["config", "--global", "user.email", "baz"]) .exec().unwrap(); - assert_that(cargo_process("new").arg("foo").env("USER", Some("foo")), + assert_that(cargo_process("new").arg("foo").env("USER", "foo"), execs().with_status(0)); let toml = paths::root().join("foo/Cargo.toml"); - let toml = 
File::open(&toml).read_to_string().unwrap(); - assert!(toml.as_slice().contains(r#"authors = ["bar <baz>"]"#)); + let mut contents = String::new(); + File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); + assert!(contents.contains(r#"authors = ["bar <baz>"]"#)); }); test!(author_prefers_cargo { @@ -142,48 +145,50 @@ my_process("git").args(&["config", "--global", "user.email", "baz"]) .exec().unwrap(); let root = paths::root(); - fs::mkdir(&root.join(".cargo"), USER_RWX).unwrap(); - File::create(&root.join(".cargo/config")).write_str(r#" + fs::create_dir(&root.join(".cargo")).unwrap(); + File::create(&root.join(".cargo/config")).unwrap().write_all(br#" [cargo-new] name = "new-foo" email = "new-bar" git = false "#).unwrap(); - assert_that(cargo_process("new").arg("foo").env("USER", Some("foo")), + assert_that(cargo_process("new").arg("foo").env("USER", "foo"), execs().with_status(0)); let toml = paths::root().join("foo/Cargo.toml"); - let toml = File::open(&toml).read_to_string().unwrap(); - assert!(toml.as_slice().contains(r#"authors = ["new-foo <new-bar>"]"#)); + let mut contents = String::new(); + File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); + assert!(contents.contains(r#"authors = ["new-foo <new-bar>"]"#)); assert!(!root.join("foo/.gitignore").exists()); }); test!(git_prefers_command_line { let root = paths::root(); let td = TempDir::new("cargo").unwrap(); - fs::mkdir(&root.join(".cargo"), USER_RWX).unwrap(); - File::create(&root.join(".cargo/config")).write_str(r#" + fs::create_dir(&root.join(".cargo")).unwrap(); + File::create(&root.join(".cargo/config")).unwrap().write_all(br#" [cargo-new] vcs = "none" name = "foo" email = "bar" "#).unwrap(); - assert_that(cargo_process("new").arg("foo").arg("--vcs").arg("git").cwd(td.path().clone()) - .env("USER", Some("foo")), + assert_that(cargo_process("new").arg("foo").arg("--vcs").arg("git") + .cwd(td.path()) + .env("USER", "foo"), execs().with_status(0)); assert!(td.path().join("foo/.gitignore").exists()); }); test!(subpackage_no_git { - assert_that(cargo_process("new").arg("foo").env("USER", Some("foo")), + assert_that(cargo_process("new").arg("foo").env("USER", "foo"), execs().with_status(0)); let subpackage = paths::root().join("foo").join("components"); - fs::mkdir(&subpackage, USER_RWX).unwrap(); + fs::create_dir(&subpackage).unwrap(); assert_that(cargo_process("new").arg("foo/components/subcomponent") - .env("USER", Some("foo")), + .env("USER", "foo"), execs().with_status(0)); assert_that(&paths::root().join("foo/components/subcomponent/.git"), diff --git a/tests/test_cargo_package.rs b/tests/test_cargo_package.rs index 30ab3ce4b..c2c2ab447 100644 --- a/tests/test_cargo_package.rs +++ b/tests/test_cargo_package.rs @@ -1,7 +1,9 @@ -use std::old_io::{File, MemReader}; +use std::fs::File; +use std::io::Cursor; +use std::io::prelude::*; use tar::Archive; -use flate2::reader::GzDecoder; +use flate2::read::GzDecoder; use cargo::util::process; use support::{project, execs, cargo_dir, paths, git}; @@ -38,18 +40,19 @@ test!(simple { compiling = COMPILING, dir = p.url()).as_slice())); assert_that(&p.root().join("target/package/foo-0.0.1.crate"), existing_file()); - assert_that(p.process(cargo_dir().join("cargo")).arg("package").arg("-l"), + assert_that(p.cargo("package").arg("-l"), execs().with_status(0).with_stdout("\ Cargo.toml src[..]main.rs ")); - assert_that(p.process(cargo_dir().join("cargo")).arg("package"), + assert_that(p.cargo("package"), execs().with_status(0).with_stdout("")); let f = 
File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); - let mut rdr = GzDecoder::new(f); - let contents = rdr.read_to_end().unwrap(); - let ar = Archive::new(MemReader::new(contents)); + let mut rdr = GzDecoder::new(f).unwrap(); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let ar = Archive::new(Cursor::new(contents)); for f in ar.files().unwrap() { let f = f.unwrap(); let fname = f.filename_bytes(); @@ -111,7 +114,7 @@ warning: manifest has no description, documentation, homepage or repository. See http://doc.crates.io/manifest.html#package-metadata for more info.")); let p = project("all") - .file("Cargo.toml", format!(r#" + .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.0.1" @@ -155,9 +158,8 @@ test!(package_verbose { "#) .file("a/src/lib.rs", ""); p.build(); - let cargo = process(cargo_dir().join("cargo")).unwrap() - .cwd(root) - .env("HOME", Some(paths::home())); + let mut cargo = process(&cargo_dir().join("cargo")).unwrap(); + cargo.cwd(&root).env("HOME", &paths::home()); assert_that(cargo.clone().arg("build"), execs().with_status(0)); assert_that(cargo.arg("package").arg("-v") .arg("--no-verify"), @@ -183,7 +185,7 @@ test!(package_verification { "#); assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(p.process(cargo_dir().join("cargo")).arg("package"), + assert_that(p.cargo("package"), execs().with_status(0).with_stdout(format!("\ {packaging} foo v0.0.1 ({dir}) {verifying} foo v0.0.1 ({dir}) diff --git a/tests/test_cargo_publish.rs b/tests/test_cargo_publish.rs index 760aa1cd5..f52b38a3d 100644 --- a/tests/test_cargo_publish.rs +++ b/tests/test_cargo_publish.rs @@ -1,6 +1,9 @@ -use std::old_io::{self, fs, File, MemReader}; +use std::io::prelude::*; +use std::fs::{self, File}; +use std::io::{Cursor, SeekFrom}; +use std::path::PathBuf; -use flate2::reader::GzDecoder; +use flate2::read::GzDecoder; use tar::Archive; use url::Url; @@ -11,23 +14,23 @@ use support::git::repo; use hamcrest::assert_that; -fn registry_path() -> Path { paths::root().join("registry") } -fn registry() -> Url { Url::from_file_path(®istry_path()).ok().unwrap() } -fn upload_path() -> Path { paths::root().join("upload") } -fn upload() -> Url { Url::from_file_path(&upload_path()).ok().unwrap() } +fn registry_path() -> PathBuf { paths::root().join("registry") } +fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } +fn upload_path() -> PathBuf { paths::root().join("upload") } +fn upload() -> Url { Url::from_file_path(&*upload_path()).ok().unwrap() } fn setup() { let config = paths::root().join(".cargo/config"); - fs::mkdir_recursive(&config.dir_path(), old_io::USER_DIR).unwrap(); - File::create(&config).write_str(format!(r#" + fs::create_dir_all(config.parent().unwrap()).unwrap(); + File::create(&config).unwrap().write_all(&format!(r#" [registry] index = "{reg}" token = "api-token" - "#, reg = registry()).as_slice()).unwrap(); - fs::mkdir_recursive(&upload_path().join("api/v1/crates"), old_io::USER_DIR).unwrap(); + "#, reg = registry()).as_bytes()).unwrap(); + fs::create_dir_all(&upload_path().join("api/v1/crates")).unwrap(); repo(®istry_path()) - .file("config.json", format!(r#"{{ + .file("config.json", &format!(r#"{{ "dl": "{0}", "api": "{0}" }}"#, upload())) @@ -60,13 +63,20 @@ test!(simple { let mut f = File::open(&upload_path().join("api/v1/crates/new")).unwrap(); // Skip the metadata payload and the size of the tarball - let sz = f.read_le_u32().unwrap(); - f.seek(sz as i64 + 4, 
old_io::SeekCur).unwrap(); + let mut sz = [0; 4]; + assert_eq!(f.read(&mut sz), Ok(4)); + let sz = ((sz[0] as u32) << 0) | + ((sz[1] as u32) << 8) | + ((sz[2] as u32) << 16) | + ((sz[3] as u32) << 24); + f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap(); // Verify the tarball let mut rdr = GzDecoder::new(f).unwrap(); assert_eq!(rdr.header().filename(), Some(b"foo-0.0.1.crate")); - let inner = MemReader::new(rdr.read_to_end().unwrap()); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let inner = Cursor::new(contents); let ar = Archive::new(inner); for file in ar.files().unwrap() { let file = file.unwrap(); diff --git a/tests/test_cargo_registry.rs b/tests/test_cargo_registry.rs index 7db4eb9dc..730b31414 100644 --- a/tests/test_cargo_registry.rs +++ b/tests/test_cargo_registry.rs @@ -1,9 +1,10 @@ -use std::old_io::{self, fs, File}; +use std::fs::{self, File}; +use std::io::prelude::*; use cargo::util::process; use support::{project, execs, cargo_dir}; use support::{UPDATING, DOWNLOADING, COMPILING, PACKAGING, VERIFYING}; -use support::paths::{self, PathExt}; +use support::paths::{self, CargoPathExt}; use support::registry as r; use support::git; @@ -192,7 +193,7 @@ version required: >= 0.0.0 r::mock_pkg("notyet", "0.0.1", &[]); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0).with_stdout(format!("\ {updating} registry `{reg}` {downloading} notyet v0.0.1 (registry file://[..]) @@ -231,7 +232,7 @@ test!(package_with_path_deps { .file("notyet/src/lib.rs", ""); p.build(); - assert_that(p.process(cargo_dir().join("cargo")).arg("package").arg("-v"), + assert_that(p.cargo("package").arg("-v"), execs().with_status(101).with_stderr("\ failed to verify package tarball @@ -243,7 +244,7 @@ version required: ^0.0.1 r::mock_pkg("notyet", "0.0.1", &[]); - assert_that(p.process(cargo_dir().join("cargo")).arg("package"), + assert_that(p.cargo("package"), execs().with_status(0).with_stdout(format!("\ {packaging} foo v0.0.1 ({dir}) {verifying} foo v0.0.1 ({dir}) @@ -277,7 +278,7 @@ test!(lockfile_locks { r::mock_pkg("bar", "0.0.1", &[]); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0).with_stdout(format!("\ {updating} registry `[..]` {downloading} bar v0.0.1 (registry file://[..]) @@ -289,7 +290,7 @@ test!(lockfile_locks { p.root().move_into_the_past().unwrap(); r::mock_pkg("bar", "0.0.2", &[]); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); }); @@ -310,7 +311,7 @@ test!(lockfile_locks_transitively { r::mock_pkg("baz", "0.0.1", &[]); r::mock_pkg("bar", "0.0.1", &[("baz", "*", "normal")]); - assert_that(p.process(cargo_dir().join("cargo")).arg("build"), + assert_that(p.cargo("build"), execs().with_status(0).with_stdout(format!("\ {updating} registry `[..]` {downloading} [..] 
v0.0.1 (registry file://[..])
@@ -325,7 +326,7 @@ test!(lockfile_locks_transitively {
     r::mock_pkg("baz", "0.0.2", &[]);
     r::mock_pkg("bar", "0.0.2", &[("baz", "*", "normal")]);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(""));
 });
@@ -348,7 +349,7 @@ test!(yanks_are_not_used {
     r::mock_pkg("bar", "0.0.1", &[("baz", "*", "normal")]);
     r::mock_pkg_yank("bar", "0.0.2", &[("baz", "*", "normal")], true);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(format!("\
 {updating} registry `[..]`
 {downloading} [..] v0.0.1 (registry file://[..])
@@ -378,7 +379,7 @@ test!(relying_on_a_yank_is_bad {
     r::mock_pkg_yank("baz", "0.0.2", &[], true);
     r::mock_pkg("bar", "0.0.1", &[("baz", "=0.0.2", "normal")]);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(101).with_stderr("\
 no matching package named `baz` found (required by `bar`)
 location searched: registry file://[..]
@@ -403,17 +404,17 @@ test!(yanks_in_lockfiles_are_ok {
     r::mock_pkg("bar", "0.0.1", &[]);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0));
-    fs::rmdir_recursive(&r::registry_path().join("3")).unwrap();
+    fs::remove_dir_all(&r::registry_path().join("3")).unwrap();
     r::mock_pkg_yank("bar", "0.0.1", &[], true);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(""));
-    assert_that(p.process(cargo_dir().join("cargo")).arg("update"),
+    assert_that(p.cargo("update"),
                 execs().with_status(101).with_stderr("\
 no matching package named `bar` found (required by `foo`)
 location searched: registry file://[..]
@@ -436,12 +437,12 @@ test!(update_with_lockfile_if_packages_missing {
     p.build();
     r::mock_pkg("bar", "0.0.1", &[]);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0));
     p.root().move_into_the_past().unwrap();
-    fs::rmdir_recursive(&paths::home().join(".cargo/registry")).unwrap();
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    fs::remove_dir_all(&paths::home().join(".cargo/registry")).unwrap();
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(format!("\
 {updating} registry `[..]`
 {downloading} bar v0.0.1 (registry file://[..])
@@ -464,21 +465,21 @@ test!(update_lockfile {
     println!("0.0.1");
     r::mock_pkg("bar", "0.0.1", &[]);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0));
     r::mock_pkg("bar", "0.0.2", &[]);
     r::mock_pkg("bar", "0.0.3", &[]);
-    fs::rmdir_recursive(&paths::home().join(".cargo/registry")).unwrap();
+    fs::remove_dir_all(&paths::home().join(".cargo/registry")).unwrap();
     println!("0.0.2 update");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("update")
+    assert_that(p.cargo("update")
                  .arg("-p").arg("bar").arg("--precise").arg("0.0.2"),
                 execs().with_status(0).with_stdout(format!("\
 {updating} registry `[..]`
 ", updating = UPDATING).as_slice()));
     println!("0.0.2 build");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(format!("\
 {downloading} [..] v0.0.2 (registry file://[..])
 {compiling} bar v0.0.2 (registry file://[..])
@@ -487,14 +488,14 @@ test!(update_lockfile {
                 dir = p.url()).as_slice()));
     println!("0.0.3 update");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("update")
+    assert_that(p.cargo("update")
                  .arg("-p").arg("bar"),
                 execs().with_status(0).with_stdout(format!("\
 {updating} registry `[..]`
 ", updating = UPDATING).as_slice()));
     println!("0.0.3 build");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(format!("\
 {downloading} [..] v0.0.3 (registry file://[..])
 {compiling} bar v0.0.3 (registry file://[..])
@@ -520,7 +521,7 @@ test!(dev_dependency_not_used {
     r::mock_pkg("baz", "0.0.1", &[]);
     r::mock_pkg("bar", "0.0.1", &[("baz", "*", "dev")]);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(format!("\
 {updating} registry `[..]`
 {downloading} [..] v0.0.1 (registry file://[..])
@@ -532,11 +533,11 @@ test!(login_with_no_cargo_dir {
     let home = paths::home().join("new-home");
-    fs::mkdir(&home, old_io::USER_DIR).unwrap();
-    assert_that(process(cargo_dir().join("cargo")).unwrap()
+    fs::create_dir(&home).unwrap();
+    assert_that(process(&cargo_dir().join("cargo")).unwrap()
                     .arg("login").arg("foo").arg("-v")
-                    .cwd(paths::root())
-                    .env("HOME", Some(home)),
+                    .cwd(&paths::root())
+                    .env("HOME", &home),
                 execs().with_status(0));
 });
@@ -586,7 +587,7 @@ test!(updating_a_dep {
     r::mock_pkg("bar", "0.0.1", &[]);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(format!("\
 {updating} registry `[..]`
 {downloading} bar v0.0.1 (registry file://[..])
@@ -596,7 +597,7 @@ test!(updating_a_dep {
 ", updating = UPDATING, downloading = DOWNLOADING,
    compiling = COMPILING, dir = p.url()).as_slice()));
-    File::create(&p.root().join("a/Cargo.toml")).write_str(r#"
+    File::create(&p.root().join("a/Cargo.toml")).unwrap().write_all(br#"
         [project]
         name = "a"
         version = "0.0.1"
@@ -608,7 +609,7 @@ test!(updating_a_dep {
     r::mock_pkg("bar", "0.1.0", &[]);
     println!("second");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(format!("\
 {updating} registry `[..]`
 {downloading} bar v0.1.0 (registry file://[..])
@@ -633,7 +634,7 @@ test!(git_and_registry_dep {
         .file("src/lib.rs", "");
     b.build();
     let p = project("foo")
-        .file("Cargo.toml", format!(r#"
+        .file("Cargo.toml", &format!(r#"
             [project]
             name = "foo"
             version = "0.0.1"
@@ -651,7 +652,7 @@ test!(git_and_registry_dep {
     r::mock_pkg("a", "0.0.1", &[]);
     p.root().move_into_the_past().unwrap();
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(format!("\
 {updating} [..]
 {updating} [..]
@@ -664,7 +665,7 @@ test!(git_and_registry_dep {
     p.root().move_into_the_past().unwrap();
     println!("second");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(""));
 });
@@ -684,20 +685,21 @@ test!(update_publish_then_update {
     r::mock_pkg("a", "0.1.0", &[]);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0));
     r::mock_pkg("a", "0.1.1", &[]);
     let lock = p.root().join("Cargo.lock");
-    let s = File::open(&lock).unwrap().read_to_string().unwrap();
-    File::create(&lock).unwrap().write_str(s.replace("0.1.0", "0.1.1").as_slice())
-         .unwrap();
+    let mut s = String::new();
+    File::open(&lock).unwrap().read_to_string(&mut s).unwrap();
+    File::create(&lock).unwrap()
+         .write_all(s.replace("0.1.0", "0.1.1").as_bytes()).unwrap();
     println!("second");
-    fs::rmdir_recursive(&p.root().join("target")).unwrap();
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    fs::remove_dir_all(&p.root().join("target")).unwrap();
+    assert_that(p.cargo("build"),
                 execs().with_status(0).with_stdout(format!("\
 {updating} [..]
 {downloading} a v0.1.1 (registry file://[..])
@@ -724,7 +726,7 @@ test!(fetch_downloads {
     r::mock_pkg("a", "0.1.0", &[]);
-    assert_that(p.process(cargo_dir().join("cargo")).arg("fetch"),
+    assert_that(p.cargo("fetch"),
                 execs().with_status(0)
                        .with_stdout(format!("\
 {updating} registry `[..]`
diff --git a/tests/test_cargo_run.rs b/tests/test_cargo_run.rs
index 1abd19dc1..cc85e968c 100644
--- a/tests/test_cargo_run.rs
+++ b/tests/test_cargo_run.rs
@@ -1,6 +1,6 @@
 use std::old_path;
-use support::{project, cargo_dir, execs, path2url};
+use support::{project, execs, path2url};
 use support::{COMPILING, RUNNING};
 use hamcrest::{assert_that, existing_file};
@@ -131,7 +131,7 @@ hello a.rs
                        dir = path2url(p.root()),
                        sep = old_path::SEP).as_slice()));
-    assert_that(p.process(cargo_dir().join("cargo")).arg("run").arg("--bin").arg("b"),
+    assert_that(p.cargo("run").arg("--bin").arg("b"),
                 execs().with_status(0).with_stdout(format!("\
 {running} `target{sep}b`
 hello b.rs
@@ -294,7 +294,7 @@ fast2
                        url = path2url(p.root()),
                        sep = old_path::SEP).as_slice()));
-    assert_that(p.process(cargo_dir().join("cargo")).arg("run").arg("-v").arg("--example").arg("a"),
+    assert_that(p.cargo("run").arg("-v").arg("--example").arg("a"),
                 execs().with_status(0).with_stdout(format!("\
 {compiling} bar v0.0.1 ({url})
 {running} `rustc bar{sep}src{sep}bar.rs --crate-name bar --crate-type lib \
diff --git a/tests/test_cargo_search.rs b/tests/test_cargo_search.rs
index e67eed073..667a66cdc 100644
--- a/tests/test_cargo_search.rs
+++ b/tests/test_cargo_search.rs
@@ -1,4 +1,6 @@
-use std::old_io::{self, fs, File};
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::PathBuf;
 use url::Url;
@@ -10,22 +12,22 @@ use support::git::repo;
 use hamcrest::assert_that;
-fn registry_path() -> Path { paths::root().join("registry") }
-fn registry() -> Url { Url::from_file_path(&registry_path()).ok().unwrap() }
-fn api_path() -> Path { paths::root().join("api") }
-fn api() -> Url { Url::from_file_path(&api_path()).ok().unwrap() }
+fn registry_path() -> PathBuf { paths::root().join("registry") }
+fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() }
+fn api_path() -> PathBuf { paths::root().join("api") }
+fn api() -> Url { Url::from_file_path(&*api_path()).ok().unwrap() }
 fn setup() {
     let config = paths::root().join(".cargo/config");
-    fs::mkdir_recursive(&config.dir_path(), old_io::USER_DIR).unwrap();
-    File::create(&config).write_str(format!(r#"
+    fs::create_dir_all(config.parent().unwrap()).unwrap();
+    File::create(&config).unwrap().write_all(format!(r#"
         [registry]
             index = "{reg}"
-    "#, reg = registry()).as_slice()).unwrap();
-    fs::mkdir_recursive(&api_path().join("api/v1"), old_io::USER_DIR).unwrap();
+    "#, reg = registry()).as_bytes()).unwrap();
+    fs::create_dir_all(&api_path().join("api/v1")).unwrap();
     repo(&registry_path())
-      .file("config.json", format!(r#"{{
+      .file("config.json", &format!(r#"{{
            "dl": "{0}",
            "api": "{0}"
       }}"#, api()))
@@ -33,9 +35,9 @@ fn setup() {
 }
 fn cargo_process(s: &str) -> ProcessBuilder {
-    process(cargo_dir().join("cargo")).unwrap().arg(s)
-        .cwd(paths::root())
-        .env("HOME", Some(paths::home()))
+    let mut b = process(&cargo_dir().join("cargo")).unwrap();
+    b.arg(s).cwd(&paths::root()).env("HOME", &paths::home());
+    b
 }
 test!(simple {
@@ -72,10 +74,10 @@ test!(simple {
     //
     // On windows, though, `?` is an invalid character, but we always build curl
     // from source there anyway!
-    File::create(&base).write_str(contents).unwrap();
+    File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap();
     if !cfg!(windows) {
-        File::create(&base.with_filename("crates?q=postgres"))
-             .write_str(contents).unwrap();
+        File::create(&base.with_file_name("crates?q=postgres")).unwrap()
+             .write_all(contents.as_bytes()).unwrap();
     }
     assert_that(cargo_process("search").arg("postgres"),
diff --git a/tests/test_cargo_test.rs b/tests/test_cargo_test.rs
index 715e9875d..67cd1aec3 100644
--- a/tests/test_cargo_test.rs
+++ b/tests/test_cargo_test.rs
@@ -1,9 +1,8 @@
-use std::old_path;
 use std::str;
 use support::{project, execs, basic_bin_manifest, basic_lib_manifest};
-use support::{COMPILING, cargo_dir, RUNNING, DOCTEST};
-use support::paths::PathExt;
+use support::{COMPILING, RUNNING, DOCTEST};
+use support::paths::CargoPathExt;
 use hamcrest::{assert_that, existing_file};
 use cargo::util::process;
@@ -29,12 +28,11 @@ test!(cargo_test_simple {
     assert_that(p.cargo_process("build"), execs());
     assert_that(&p.bin("foo"), existing_file());
-    assert_that(
-        process(p.bin("foo")).unwrap(),
-        execs().with_stdout("hello\n"));
+    assert_that(process(&p.bin("foo")).unwrap(),
+                execs().with_stdout("hello\n"));
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test"),
-        execs().with_stdout(format!("\
+    assert_that(p.cargo("test"),
+                execs().with_stdout(format!("\
 {} foo v0.5.0 ({})
 {} target[..]foo-[..]
@@ -94,7 +92,7 @@ test!(many_similar_names {
     "#);
     let output = p.cargo_process("test").exec_with_output().unwrap();
-    let output = str::from_utf8(output.output.as_slice()).unwrap();
+    let output = str::from_utf8(&output.stdout).unwrap();
     assert!(output.contains("test bin_test"), "bin_test missing\n{}", output);
     assert!(output.contains("test lib_test"), "lib_test missing\n{}", output);
     assert!(output.contains("test test_test"), "test_test missing\n{}", output);
@@ -120,12 +118,11 @@ test!(cargo_test_failing_test {
     assert_that(p.cargo_process("build"), execs());
     assert_that(&p.bin("foo"), existing_file());
-    assert_that(
-        process(p.bin("foo")).unwrap(),
-        execs().with_stdout("hello\n"));
+    assert_that(process(&p.bin("foo")).unwrap(),
+                execs().with_stdout("hello\n"));
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test"),
-        execs().with_stdout(format!("\
+    assert_that(p.cargo("test"),
+                execs().with_stdout(format!("\
 {} foo v0.5.0 ({})
 {} target[..]foo-[..]
@@ -137,7 +134,7 @@ failures:
 ---- test_hello stdout ----
 thread 'test_hello' panicked at 'assertion failed: \
     `(left == right) && (right == left)` (left: \
-    `\"hello\"`, right: `\"nope\"`)', src{sep}foo.rs:12
+    `\"hello\"`, right: `\"nope\"`)', src[..]foo.rs:12
@@ -147,8 +144,7 @@ failures:
 test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured
 ",
-                        COMPILING, p.url(), RUNNING,
-                        sep = old_path::SEP))
+                        COMPILING, p.url(), RUNNING))
                 .with_stderr(format!("\
 thread '<main>' panicked at 'Some tests failed', [..]
@@ -470,7 +466,7 @@ test!(cargo_test_twice {
     p.cargo_process("build");
     for _ in range(0, 2) {
-        assert_that(p.process(cargo_dir().join("cargo")).arg("test"),
+        assert_that(p.cargo("test"),
                     execs().with_status(0));
     }
 });
@@ -642,7 +638,7 @@ test!(bin_there_for_integration {
     "#);
     let output = p.cargo_process("test").exec_with_output().unwrap();
-    let output = str::from_utf8(output.output.as_slice()).unwrap();
+    let output = str::from_utf8(&output.stdout).unwrap();
     assert!(output.contains("main_test ... ok"), "no main_test\n{}", output);
     assert!(output.contains("test_test ... ok"), "no test_test\n{}", output);
 });
@@ -720,7 +716,7 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
                        doctest = DOCTEST, dir = p.url()).as_slice()));
     p.root().move_into_the_past().unwrap();
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test"),
+    assert_that(p.cargo("test"),
                 execs().with_status(0)
                        .with_stdout(format!("\
 {running} target[..]foo-[..]
@@ -786,7 +782,7 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
                        doctest = DOCTEST, dir = p.url()).as_slice()));
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test"),
+    assert_that(p.cargo("test"),
                 execs().with_status(0)
                        .with_stdout(format!("\
 {running} target[..]foo-[..]
@@ -842,7 +838,7 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
                        doctest = DOCTEST, dir = p.url()).as_slice()));
-    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+    assert_that(p.cargo("build"),
                 execs().with_status(0)
                        .with_stdout(""));
 });
@@ -1017,8 +1013,7 @@ test!(selective_testing {
     p.build();
     println!("d1");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test")
-                 .arg("-p").arg("d1"),
+    assert_that(p.cargo("test").arg("-p").arg("d1"),
                 execs().with_status(0)
                        .with_stdout(format!("\
 {compiling} d1 v0.0.1 ({dir})
@@ -1031,8 +1026,7 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured\n
                        dir = p.url()).as_slice()));
     println!("d2");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test")
-                 .arg("-p").arg("d2"),
+    assert_that(p.cargo("test").arg("-p").arg("d2"),
                 execs().with_status(0)
                        .with_stdout(format!("\
 {compiling} d2 v0.0.1 ({dir})
@@ -1045,7 +1039,7 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured\n
                        dir = p.url()).as_slice()));
     println!("whole");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test"),
+    assert_that(p.cargo("test"),
                 execs().with_status(0)
                        .with_stdout(format!("\
 {compiling} foo v0.0.1 ({dir})
@@ -1096,7 +1090,7 @@ test!(almost_cyclic_but_not_quite {
         .file("c/src/lib.rs", "");
     assert_that(p.cargo_process("build"), execs().with_status(0));
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test"),
+    assert_that(p.cargo("test"),
                 execs().with_status(0));
 });
@@ -1123,8 +1117,7 @@ test!(build_then_selective_test {
     assert_that(p.cargo_process("build"), execs().with_status(0));
     p.root().move_into_the_past().unwrap();
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test")
-                 .arg("-p").arg("b"),
+    assert_that(p.cargo("test").arg("-p").arg("b"),
                 execs().with_status(0));
 });
@@ -1204,8 +1197,7 @@ test!(selective_testing_with_docs {
         .file("d1/d1.rs", "");
     p.build();
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test")
-                 .arg("-p").arg("d1"),
+    assert_that(p.cargo("test").arg("-p").arg("d1"),
                 execs().with_status(0)
                        .with_stdout(format!("\
 {compiling} d1 v0.0.1 ({dir})
@@ -1248,9 +1240,9 @@ test!(example_bin_same_name {
     assert_that(&p.bin("foo"), existing_file());
     assert_that(&p.bin("examples/foo"), existing_file());
-    assert_that(p.process(p.bin("foo")),
+    assert_that(p.process(&p.bin("foo")),
                 execs().with_status(0).with_stdout("bin\n"));
-    assert_that(p.process(p.bin("examples/foo")),
+    assert_that(p.process(&p.bin("examples/foo")),
                 execs().with_status(0).with_stdout("example\n"));
 });
@@ -1270,7 +1262,7 @@ test!(test_with_example_twice {
                 execs().with_status(0));
     assert_that(&p.bin("examples/foo"), existing_file());
     println!("second");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test").arg("-v"),
+    assert_that(p.cargo("test").arg("-v"),
                 execs().with_status(0));
     assert_that(&p.bin("examples/foo"), existing_file());
 });
@@ -1328,7 +1320,7 @@ test!(bin_is_preserved {
     assert_that(&p.bin("foo"), existing_file());
     println!("testing");
-    assert_that(p.process(cargo_dir().join("cargo")).arg("test").arg("-v"),
+    assert_that(p.cargo("test").arg("-v"),
                 execs().with_status(0));
     assert_that(&p.bin("foo"), existing_file());
 });
diff --git a/tests/tests.rs b/tests/tests.rs
index 18c5856b9..e92077c01 100644
--- a/tests/tests.rs
+++ b/tests/tests.rs
@@ -1,10 +1,11 @@
-#![feature(collections, core, io, old_io, os, old_path, rustc_private, std_misc, env)]
+#![feature(core, io, old_io, os, old_path)]
+#![feature(std_misc, env, io, path, fs, tempdir, process)]
+extern crate "rustc-serialize" as serialize;
 extern crate cargo;
 extern crate flate2;
 extern crate git2;
 extern crate hamcrest;
-extern crate serialize;
 extern crate tar;
 extern crate term;
 extern crate url;
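
The hunks above all apply one mechanical migration: the test suite moves from the pre-1.0 `std::old_io`/`std::old_path` APIs to `std::fs`, `std::io`, and `std::path`. Below is a minimal, self-contained sketch of that before/after pattern (not part of the patch; the directory and file names are made up for illustration, and `.unwrap()` is used to match the test style):

use std::fs::{self, File};
use std::io::prelude::*;
use std::path::PathBuf;

fn main() {
    // Owned paths are now PathBuf rather than the old owned Path type.
    let dir = PathBuf::from("scratch-registry");

    // fs::mkdir_recursive / fs::rmdir_recursive become create_dir_all / remove_dir_all.
    fs::create_dir_all(&dir).unwrap();

    // write_str is gone: write bytes through the Write trait instead.
    File::create(dir.join("config")).unwrap()
        .write_all(b"[registry]\n").unwrap();

    // read_to_string now fills a caller-supplied String buffer.
    let mut s = String::new();
    File::open(dir.join("config")).unwrap().read_to_string(&mut s).unwrap();
    assert_eq!(s, "[registry]\n");

    fs::remove_dir_all(&dir).unwrap();
}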