//! Integration tests for `cargo build` with path dependencies
//! (file: `cargo/tests/test_cargo_compile_path_deps.rs`).

use std::fs::{self, File};
use std::io::prelude::*;
use support::{project, execs, main_file};
use support::paths::{self, CargoPathExt};
use hamcrest::{assert_that, existing_file};
use cargo::util::process;

// Nested path dependencies declared with the `path = "..."` shorthand must
// build in dependency order (baz -> bar -> foo), `-p` must be able to target
// an individual package in the graph, and the produced binary must run.
#[test]
fn cargo_compile_with_nested_deps_shorthand() {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [dependencies.bar]
            version = "0.5.0"
            path = "bar"

            [[bin]]
            name = "foo"
        "#)
        .file("src/foo.rs",
              &main_file(r#""{}", bar::gimme()"#, &["bar"]))
        .file("bar/Cargo.toml", r#"
            [project]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [dependencies.baz]
            version = "0.5.0"
            path = "baz"

            [lib]
            name = "bar"
        "#)
        .file("bar/src/bar.rs", r#"
            extern crate baz;

            pub fn gimme() -> String {
                baz::gimme()
            }
        "#)
        .file("bar/baz/Cargo.toml", r#"
            [project]
            name = "baz"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [lib]
            name = "baz"
        "#)
        .file("bar/baz/src/baz.rs", r#"
            pub fn gimme() -> String {
                "test passed".to_string()
            }
        "#);

    // Full build: all three crates compile, deepest dependency first.
    assert_that(p.cargo_process("build"),
                execs().with_status(0)
                       .with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/bar/baz)\n\
                                              [COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url(),
                                             p.url())));

    assert_that(&p.bin("foo"), existing_file());

    assert_that(process(&p.bin("foo")),
                execs().with_stdout("test passed\n").with_status(0));

    println!("cleaning");
    assert_that(p.cargo("clean"),
                execs().with_stdout("").with_status(0));

    // `-p baz` after a clean compiles only `baz`.
    println!("building baz");
    assert_that(p.cargo("build").arg("-p").arg("baz"),
                execs().with_status(0)
                       .with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/bar/baz)\n",
                                             p.url())));

    // `-p foo` then reuses the fresh `baz` and compiles only `bar` and `foo`.
    println!("building foo");
    assert_that(p.cargo("build").arg("-p").arg("foo"),
                execs().with_status(0)
                       .with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url())));
}
// A dev-dependency is not built for a plain `cargo build`, so a binary whose
// `main` links against the dev-dependency's crate must fail to compile
// (exit status 101).
#[test]
fn cargo_compile_with_root_dev_deps() {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [dev-dependencies.bar]
            version = "0.5.0"
            path = "../bar"

            [[bin]]
            name = "foo"
        "#)
        .file("src/main.rs",
              &main_file(r#""{}", bar::gimme()"#, &["bar"]));
    let p2 = project("bar")
        .file("Cargo.toml", r#"
            [package]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]
        "#)
        .file("src/lib.rs", r#"
            pub fn gimme() -> &'static str {
                "zoidberg"
            }
        "#);

    // Lay `bar` down on disk so the path dependency resolves; the failure
    // must come from `bar` being unavailable at build time, not missing.
    p2.build();
    assert_that(p.cargo_process("build"),
                execs().with_status(101))
}
// Counterpart to `cargo_compile_with_root_dev_deps`: under `cargo test` the
// dev-dependency IS built, so the same project compiles and its (empty) test
// suite runs successfully.
#[test]
fn cargo_compile_with_root_dev_deps_with_testing() {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [dev-dependencies.bar]
            version = "0.5.0"
            path = "../bar"

            [[bin]]
            name = "foo"
        "#)
        .file("src/main.rs",
              &main_file(r#""{}", bar::gimme()"#, &["bar"]));
    let p2 = project("bar")
        .file("Cargo.toml", r#"
            [package]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]
        "#)
        .file("src/lib.rs", r#"
            pub fn gimme() -> &'static str {
                "zoidberg"
            }
        "#);

    p2.build();
    // Both crates compile (order of the two `[COMPILING]` lines is not
    // pinned, hence the `[..]` wildcards), then the test binary runs.
    assert_that(p.cargo_process("test"),
                execs().with_stderr("\
[COMPILING] [..] v0.5.0 ([..])
[COMPILING] [..] v0.5.0 ([..])
[RUNNING] target[..]foo-[..]")
                       .with_stdout("
running 0 tests

test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured

"));
}
// A dev-dependency of a *dependency* must never be activated by `cargo
// build`: `bar`'s dev-dependency points at a nonexistent git URL, and the
// build must still succeed without ever touching it.
#[test]
fn cargo_compile_with_transitive_dev_deps() {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [dependencies.bar]
            version = "0.5.0"
            path = "bar"

            [[bin]]
            name = "foo"
        "#)
        .file("src/foo.rs",
              &main_file(r#""{}", bar::gimme()"#, &["bar"]))
        .file("bar/Cargo.toml", r#"
            [project]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [dev-dependencies.baz]
            git = "git://example.com/path/to/nowhere"

            [lib]
            name = "bar"
        "#)
        .file("bar/src/bar.rs", r#"
            pub fn gimme() -> &'static str {
                "zoidberg"
            }
        "#);

    assert_that(p.cargo_process("build"),
                execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url())));

    assert_that(&p.bin("foo"), existing_file());

    assert_that(process(&p.bin("foo")),
                execs().with_stdout("zoidberg\n"));
}
// An unchanged path dependency must not be recompiled on a second build,
// even after the project files are rewritten with their mtimes pushed into
// the past (which forces a rebuild of everything on the third build).
#[test]
fn no_rebuild_dependency() {
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [[bin]]
            name = "foo"

            [dependencies.bar]
            path = "bar"
        "#)
        .file("src/foo.rs", r#"
            extern crate bar;
            fn main() { bar::bar() }
        "#)
        .file("bar/Cargo.toml", r#"
            [project]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [lib]
            name = "bar"
        "#)
        .file("bar/src/bar.rs", r#"
            pub fn bar() {}
        "#);

    // First time around we should compile both foo and bar.
    assert_that(p.cargo_process("build"),
                execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url())));

    // This time we shouldn't compile bar.
    assert_that(p.cargo("build"),
                execs().with_stdout(""));

    p.root().move_into_the_past().unwrap();
    p.build(); // rebuild the files (rewriting them in the process)
    assert_that(p.cargo("build"),
                execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url())));
}
// In a foo -> bar -> baz chain, editing `baz` must rebuild all three crates,
// while editing `bar` must rebuild only `bar` and `foo` (never `baz`).
// Recompilation is mtime-based, so each edit is preceded by a 1s sleep to
// guarantee the write actually changes the mtime.
#[test]
fn deep_dependencies_trigger_rebuild() {
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [[bin]]
            name = "foo"

            [dependencies.bar]
            path = "bar"
        "#)
        .file("src/foo.rs", r#"
            extern crate bar;
            fn main() { bar::bar() }
        "#)
        .file("bar/Cargo.toml", r#"
            [project]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [lib]
            name = "bar"

            [dependencies.baz]
            path = "../baz"
        "#)
        .file("bar/src/bar.rs", r#"
            extern crate baz;
            pub fn bar() { baz::baz() }
        "#)
        .file("baz/Cargo.toml", r#"
            [project]
            name = "baz"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [lib]
            name = "baz"
        "#)
        .file("baz/src/baz.rs", r#"
            pub fn baz() {}
        "#);

    assert_that(p.cargo_process("build"),
                execs().with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/baz)\n\
                                              [COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url(),
                                             p.url())));
    assert_that(p.cargo("build"),
                execs().with_stdout(""));

    // Make sure an update to baz triggers a rebuild of bar
    //
    // We base recompilation off mtime, so sleep for at least a second to ensure
    // that this write will change the mtime.
    ::sleep_ms(1000);
    File::create(&p.root().join("baz/src/baz.rs")).unwrap().write_all(br#"
        pub fn baz() { println!("hello!"); }
    "#).unwrap();
    assert_that(p.cargo("build"),
                execs().with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/baz)\n\
                                              [COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url(),
                                             p.url())));

    // Make sure an update to bar doesn't trigger baz
    ::sleep_ms(1000);
    File::create(&p.root().join("bar/src/bar.rs")).unwrap().write_all(br#"
        extern crate baz;
        pub fn bar() { println!("hello!"); baz::baz(); }
    "#).unwrap();
    assert_that(p.cargo("build"),
                execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url())));
}
// With a diamond-ish graph (foo depends on bar and baz; bar also depends on
// baz), a second `cargo build` with no changes must be a complete no-op and
// the binary must survive it.
#[test]
fn no_rebuild_two_deps() {
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [[bin]]
            name = "foo"

            [dependencies.bar]
            path = "bar"

            [dependencies.baz]
            path = "baz"
        "#)
        .file("src/foo.rs", r#"
            extern crate bar;
            fn main() { bar::bar() }
        "#)
        .file("bar/Cargo.toml", r#"
            [project]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [lib]
            name = "bar"

            [dependencies.baz]
            path = "../baz"
        "#)
        .file("bar/src/bar.rs", r#"
            pub fn bar() {}
        "#)
        .file("baz/Cargo.toml", r#"
            [project]
            name = "baz"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [lib]
            name = "baz"
        "#)
        .file("baz/src/baz.rs", r#"
            pub fn baz() {}
        "#);

    assert_that(p.cargo_process("build"),
                execs().with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/baz)\n\
                                              [COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url(),
                                             p.url())));
    assert_that(&p.bin("foo"), existing_file());

    // Nothing changed, so the second build must emit nothing.
    assert_that(p.cargo("build"),
                execs().with_stdout(""));
    assert_that(&p.bin("foo"), existing_file());
}
// A path dependency that lives *inside* the parent's `src/` directory
// (`src/bar`) must not be recompiled when only the parent's own source
// (`src/foo.rs`) changes.
#[test]
fn nested_deps_recompile() {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [dependencies.bar]
            version = "0.5.0"
            path = "src/bar"

            [[bin]]
            name = "foo"
        "#)
        .file("src/foo.rs",
              &main_file(r#""{}", bar::gimme()"#, &["bar"]))
        .file("src/bar/Cargo.toml", r#"
            [project]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [lib]
            name = "bar"
        "#)
        .file("src/bar/src/bar.rs", "pub fn gimme() -> i32 { 92 }");
    let bar = p.url();

    assert_that(p.cargo_process("build"),
                execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/src/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             bar,
                                             p.url())));

    // Sleep so the rewrite below is guaranteed to bump the mtime.
    ::sleep_ms(1000);
    File::create(&p.root().join("src/foo.rs")).unwrap().write_all(br#"
        fn main() {}
    "#).unwrap();

    // This shouldn't recompile `bar`
    assert_that(p.cargo("build"),
                execs().with_stderr(&format!("[COMPILING] foo v0.5.0 ({})\n",
                                             p.url())));
}
// A `path` dependency whose directory exists but contains no Cargo.toml
// should produce a clear chained error naming the unreadable manifest.
// (Defect fixed: interleaved git-blame timestamp lines had corrupted this
// function; the code is reconstructed to its valid form.)
#[test]
fn error_message_for_missing_manifest() {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]

            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [dependencies.bar]
            path = "src/bar"

            [lib]
            name = "foo"
        "#)
        // The directory exists, but holds no manifest.
        .file("src/bar/not-a-manifest", "");

    assert_that(p.cargo_process("build"),
                execs().with_status(101)
                       .with_stderr("\
[ERROR] Unable to update file://[..]

Caused by:
  failed to read `[..]bar[..]Cargo.toml`

Caused by:
  [..] (os error [..])
"));
}
// A `paths` override given as a *relative* path in .cargo/config is
// resolved relative to the config file's location; the build must succeed.
// (Formatting reconstructed from the mangled raw view; behavior unchanged.)
#[test]
fn override_relative() {
    let bar = project("bar")
        .file("Cargo.toml", r#"
            [package]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]
        "#)
        .file("src/lib.rs", "");

    // Install a config whose override path is relative, not absolute.
    let dot_cargo = paths::root().join(".cargo");
    fs::create_dir(&dot_cargo).unwrap();
    File::create(&dot_cargo.join("config")).unwrap()
         .write_all(br#"paths = ["bar"]"#).unwrap();

    // The dependency itself is declared with an absolute path.
    let manifest = format!(r#"
            [package]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]
            [dependencies.bar]
            path = '{}'
        "#, bar.root().display());
    let p = project("foo")
        .file("Cargo.toml", &manifest)
        .file("src/lib.rs", "");

    bar.build();
    assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0));
}
// A package that lists its *own* root in the `paths` override (i.e. it
// overrides itself) must still build without confusing the resolver.
#[test]
fn override_self() {
    let bar = project("bar")
        .file("Cargo.toml", r#"
            [package]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]
        "#)
        .file("src/lib.rs", "");

    let p = project("foo");
    // Capture foo's root up front so the config can point back at it.
    let self_root = p.root().clone();
    let config = format!(r#"
            paths = ['{}']
        "#, self_root.display());
    let manifest = format!(r#"
            [package]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]
            [dependencies.bar]
            path = '{}'
        "#, bar.root().display());
    let p = p
        .file(".cargo/config", &config)
        .file("Cargo.toml", &manifest)
        .file("src/lib.rs", "")
        .file("src/main.rs", "fn main() {}");

    bar.build();
    assert_that(p.cargo_process("build"), execs().with_status(0));
}
// Overridden packages may themselves have path dependencies (p1 -> p2);
// listing both p1 and p2 in `paths` must resolve cleanly.
#[test]
fn override_path_dep() {
    let bar = project("bar")
        .file("p1/Cargo.toml", r#"
            [package]
            name = "p1"
            version = "0.5.0"
            authors = []
            [dependencies.p2]
            path = "../p2"
        "#)
        .file("p1/src/lib.rs", "")
        .file("p2/Cargo.toml", r#"
            [package]
            name = "p2"
            version = "0.5.0"
            authors = []
        "#)
        .file("p2/src/lib.rs", "");

    // Both members of the override are named individually in the config.
    let p1_root = bar.root().join("p1");
    let p2_root = bar.root().join("p2");
    let config = format!(r#"
            paths = ['{}', '{}']
        "#, p1_root.display(), p2_root.display());
    let manifest = format!(r#"
            [package]
            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]
            [dependencies.p2]
            path = '{}'
        "#, p2_root.display());
    let p = project("foo")
        .file(".cargo/config", &config)
        .file("Cargo.toml", &manifest)
        .file("src/lib.rs", "");

    bar.build();
    assert_that(p.cargo_process("build").arg("-v"),
                execs().with_status(0));
}
// A path dependency with a custom build script: touching the script's
// input (`bar.rs.in`) must re-run the build command and rebuild both
// crates, and the produced binary must reflect the new output.
// (Defect fixed: interleaved git-blame timestamp lines had corrupted this
// function; the code is reconstructed to its valid form.)
#[test]
fn path_dep_build_cmd() {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]

            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [dependencies.bar]
            version = "0.5.0"
            path = "bar"

            [[bin]]
            name = "foo"
        "#)
        .file("src/foo.rs",
              &main_file(r#""{}", bar::gimme()"#, &["bar"]))
        .file("bar/Cargo.toml", r#"
            [project]

            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]
            build = "build.rs"

            [lib]
            name = "bar"
        "#)
        // The build script generates bar's real source from `bar.rs.in`.
        .file("bar/build.rs", r#"
            use std::fs;
            fn main() {
                fs::copy("src/bar.rs.in", "src/bar.rs").unwrap();
            }
        "#)
        .file("bar/src/bar.rs.in", r#"
            pub fn gimme() -> i32 { 0 }
        "#);
    p.build();
    // Age bar's mtimes so the later write is unambiguously newer.
    p.root().join("bar").move_into_the_past().unwrap();

    assert_that(p.cargo("build"),
                execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url())));

    assert_that(&p.bin("foo"), existing_file());

    assert_that(process(&p.bin("foo")),
                execs().with_stdout("0\n"));

    // Touching bar.rs.in should cause the `build` command to run again.
    {
        let file = fs::File::create(&p.root().join("bar/src/bar.rs.in"));
        file.unwrap().write_all(br#"pub fn gimme() -> i32 { 1 }"#).unwrap();
    }

    assert_that(p.cargo("build"),
                execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
                                              [COMPILING] foo v0.5.0 ({})\n",
                                             p.url(),
                                             p.url())));

    assert_that(process(&p.bin("foo")),
                execs().with_stdout("1\n"));
}
// Dev-dependencies must not force a rebuild of the library for a plain
// `cargo build`; only `cargo test` compiles the dev-dep (`bar`) and the
// test variant of `foo`.
// (Defect fixed: interleaved git-blame timestamp lines had corrupted this
// function; the code is reconstructed to its valid form.)
#[test]
fn dev_deps_no_rebuild_lib() {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.5.0"
            authors = []

            [dev-dependencies.bar]
            path = "bar"

            [lib]
            name = "foo"
            doctest = false
        "#)
        // `bar` is only reachable under cfg(test); the non-test build
        // instead reads the FOO env var at compile time.
        .file("src/lib.rs", r#"
            #[cfg(test)] extern crate bar;
            #[cfg(not(test))] pub fn foo() { env!("FOO"); }
        "#)
        .file("bar/Cargo.toml", r#"
            [package]
            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]
        "#)
        .file("bar/src/lib.rs", "pub fn bar() {}");
    p.build();

    // `cargo build` compiles only `foo`, never the dev-dependency.
    assert_that(p.cargo("build")
                 .env("FOO", "bar"),
                execs().with_status(0)
                       .with_stderr(&format!("[COMPILING] foo v0.5.0 ({})\n",
                                             p.url())));

    // `cargo test` compiles both crates and runs the (empty) test suite.
    assert_that(p.cargo("test"),
                execs().with_status(0)
                       .with_stderr(&format!("\
[COMPILING] [..] v0.5.0 ({url}[..])
[COMPILING] [..] v0.5.0 ({url}[..])
[RUNNING] target[..]foo-[..]", url = p.url()))
                       .with_stdout("
running 0 tests

test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured

"));
}
// Sharing one CARGO_TARGET_DIR between two workspaces with a common path
// dependency `a`: building `b` afterwards must reuse the cached `a`
// instead of recompiling it.
// (Defect fixed: interleaved git-blame timestamp lines had corrupted this
// function; the code is reconstructed to its valid form.)
#[test]
fn custom_target_no_rebuild() {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.5.0"
            authors = []
            [dependencies]
            a = { path = "a" }
        "#)
        .file("src/lib.rs", "")
        .file("a/Cargo.toml", r#"
            [project]
            name = "a"
            version = "0.5.0"
            authors = []
        "#)
        .file("a/src/lib.rs", "")
        .file("b/Cargo.toml", r#"
            [project]
            name = "b"
            version = "0.5.0"
            authors = []
            [dependencies]
            a = { path = "../a" }
        "#)
        .file("b/src/lib.rs", "");
    p.build();

    assert_that(p.cargo("build"),
                execs().with_status(0)
                       .with_stderr("\
[COMPILING] a v0.5.0 ([..])
[COMPILING] foo v0.5.0 ([..])
"));

    // Same target dir: `a` is already fresh, so only `b` compiles.
    assert_that(p.cargo("build")
                 .arg("--manifest-path=b/Cargo.toml")
                 .env("CARGO_TARGET_DIR", "target"),
                execs().with_status(0)
                       .with_stderr("\
[COMPILING] b v0.5.0 ([..])
"));
}
// A package (`a2`) that is both reached through a `paths` override and
// declared as a direct dependency must be compiled exactly once and
// shared by `a1` and `b`.
// (Defect fixed: interleaved git-blame timestamp lines had corrupted this
// function; the code is reconstructed to its valid form.)
#[test]
fn override_and_depend() {
    let p = project("foo")
        .file("a/a1/Cargo.toml", r#"
            [project]
            name = "a1"
            version = "0.5.0"
            authors = []
            [dependencies]
            a2 = { path = "../a2" }
        "#)
        .file("a/a1/src/lib.rs", "")
        .file("a/a2/Cargo.toml", r#"
            [project]
            name = "a2"
            version = "0.5.0"
            authors = []
        "#)
        .file("a/a2/src/lib.rs", "")
        .file("b/Cargo.toml", r#"
            [project]
            name = "b"
            version = "0.5.0"
            authors = []
            [dependencies]
            a1 = { path = "../a/a1" }
            a2 = { path = "../a/a2" }
        "#)
        .file("b/src/lib.rs", "")
        // The override points at the directory containing both a1 and a2.
        .file("b/.cargo/config", r#"
            paths = ["../a"]
        "#);
    p.build();
    assert_that(p.cargo("build").cwd(p.root().join("b")),
                execs().with_status(0)
                       .with_stderr("\
[COMPILING] a2 v0.5.0 ([..])
[COMPILING] a1 v0.5.0 ([..])
[COMPILING] b v0.5.0 ([..])
"));
}
// A `paths` override that names a nonexistent directory must fail with a
// chained error identifying the bad override and the unreadable directory.
// (Defect fixed: interleaved git-blame timestamp lines had corrupted this
// function; the code is reconstructed to its valid form.)
#[test]
fn missing_path_dependency() {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]
            name = "a"
            version = "0.5.0"
            authors = []
        "#)
        .file("src/lib.rs", "")
        .file(".cargo/config", r#"
            paths = ["../whoa-this-does-not-exist"]
        "#);
    p.build();

    assert_that(p.cargo("build"),
                execs().with_status(101)
                       .with_stderr("\
[ERROR] failed to update path override `[..]../whoa-this-does-not-exist` \
(defined in `[..]`)

Caused by:
  failed to read directory `[..]`

Caused by:
  [..] (os error [..])
"));
}