2014-10-17 21:02:16 +00:00
|
|
|
use std::io::{mod, fs, TempDir, File};
|
2014-06-19 22:21:00 +00:00
|
|
|
use std::os;
|
2014-06-25 05:06:11 +00:00
|
|
|
use std::path;
|
2014-06-19 22:21:00 +00:00
|
|
|
|
2014-07-16 15:08:55 +00:00
|
|
|
use support::{ResultTest, project, execs, main_file, basic_bin_manifest};
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
use support::{COMPILING, RUNNING, cargo_dir, ProjectBuilder};
|
2014-06-25 05:06:11 +00:00
|
|
|
use hamcrest::{assert_that, existing_file};
|
2014-08-16 00:57:16 +00:00
|
|
|
use support::paths::PathExt;
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
use cargo::util::process;
|
2014-03-19 01:10:48 +00:00
|
|
|
|
|
|
|
/// Per-test setup hook invoked by the `test!` harness macro before each
/// test body runs. This suite needs no shared fixture state, so it is empty.
fn setup() {
}
|
|
|
|
|
2014-06-11 22:59:18 +00:00
|
|
|
// Smoke test: a minimal binary package must build cleanly, produce an
// executable, and that executable must print the string baked into `main`.
test!(cargo_compile_simple {
    let build = project("foo")
        .file("Cargo.toml", basic_bin_manifest("foo").as_slice())
        .file("src/foo.rs", main_file(r#""i am foo""#, &[]).as_slice());

    // The build itself succeeds.
    assert_that(build.cargo_process("build"), execs());
    // The compiled binary shows up under the target directory.
    assert_that(&build.bin("foo"), existing_file());
    // Running the binary emits the expected output.
    assert_that(process(build.bin("foo")).unwrap(),
                execs().with_stdout("i am foo\n"));
})
|
|
|
|
|
2014-08-01 15:52:51 +00:00
|
|
|
// `cargo build --manifest-path <path>` must locate and build a project that
// does not live in the current working directory.
test!(cargo_compile_manifest_path {
    let proj = project("foo")
        .file("Cargo.toml", basic_bin_manifest("foo").as_slice())
        .file("src/foo.rs", main_file(r#""i am foo""#, &[]).as_slice());

    // Run cargo from the parent directory, pointing it at foo/Cargo.toml.
    let cmd = proj.cargo_process("build")
                  .arg("--manifest-path").arg("foo/Cargo.toml")
                  .cwd(proj.root().dir_path());
    assert_that(cmd, execs().with_status(0));
    assert_that(&proj.bin("foo"), existing_file());
})
|
|
|
|
|
2014-05-09 23:57:13 +00:00
|
|
|
// An empty Cargo.toml is rejected: the manifest must contain a `package`
// (or legacy `project`) section, and cargo exits with status 101.
test!(cargo_compile_with_invalid_manifest {
    let proj = project("foo")
        .file("Cargo.toml", "");

    assert_that(proj.cargo_process("build"),
                execs().with_status(101)
                       .with_stderr("Cargo.toml is not a valid manifest\n\n\
                                     No `package` or `project` section found.\n"))
})
|
|
|
|
|
|
|
|
// A syntactically invalid Cargo.toml (a bare identifier used as a value)
// must fail with the TOML parser's error, including its 1-based line:column
// span.
// NOTE(review): the 12-space indent inside the raw string is load-bearing —
// the expected `3:19-3:20` span points at `bar` on the manifest's third line.
test!(cargo_compile_with_invalid_manifest2 {
    let p = project("foo")
        .file("Cargo.toml", r"
            [project]
            foo = bar
        ");

    assert_that(p.cargo_process("build"),
                execs()
                .with_status(101)
                // A trailing `\` continues the literal and strips the next
                // line's indentation, so the expected stderr is two lines.
                .with_stderr("could not parse input TOML\n\
                              Cargo.toml:3:19-3:20 expected a value\n\n"))
})
|
2014-05-09 00:50:28 +00:00
|
|
|
|
2014-08-28 19:28:23 +00:00
|
|
|
// A manifest selected explicitly via --manifest-path goes through the same
// parser: an invalid src/Cargo.toml must yield a TOML parse error. The path
// in the expected stderr is matched loosely (`src[..]Cargo.toml`) so the
// test is independent of the platform path separator.
test!(cargo_compile_with_invalid_manifest3 {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "foo"
            version = "0.0.1"
            authors = []
        "#)
        // `a = bar` is invalid TOML: `bar` is a bare identifier, not a value
        // (column 5 is where `bar` begins, matching the `1:5-1:6` span).
        .file("src/Cargo.toml", "a = bar");

    assert_that(p.cargo_process("build").arg("--manifest-path")
                 .arg("src/Cargo.toml"),
                execs()
                .with_status(101)
                .with_stderr("could not parse input TOML\n\
                              src[..]Cargo.toml:1:5-1:6 expected a value\n\n"))
})
|
|
|
|
|
2014-06-25 01:48:38 +00:00
|
|
|
// `version` must be a full semver triple; "1.0" parses fine as TOML but
// fails manifest validation with a semver error naming the offending key.
test!(cargo_compile_with_invalid_version {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            authors = []
            version = "1.0"
        "#);

    assert_that(p.cargo_process("build"),
                execs()
                .with_status(101)
                .with_stderr("Cargo.toml is not a valid manifest\n\n\
                              cannot parse '1.0' as a semver for the key \
                              `project.version`\n"))
})
|
|
|
|
|
2014-05-09 23:57:13 +00:00
|
|
|
// Building in a directory tree containing no Cargo.toml at all must fail
// with status 102 and a "could not find" message.
test!(cargo_compile_without_manifest {
    // A scratch temp dir guarantees no ancestor directory holds a manifest.
    let scratch = TempDir::new("cargo").unwrap();
    let builder = ProjectBuilder::new("foo", scratch.path().clone());

    assert_that(builder.cargo_process("build"),
                execs().with_status(102)
                       .with_stderr("Could not find Cargo.toml in this directory or any \
                                     parent directory\n"));
})
|
2014-05-13 00:33:13 +00:00
|
|
|
|
|
|
|
// Invalid Rust source must surface rustc's diagnostics through cargo, exit
// with status 101, and still write a Cargo.lock (dependency resolution
// succeeded even though compilation failed).
//
// Fix: the expected-stderr format string had its `{filename}` placeholders
// garbled to the literal `(unknown)`, which both breaks the match on any
// real path and leaves the `filename = …` named argument unused (a
// compile error for `format!`). The placeholders are restored so the path
// is built portably from `path::SEP`.
test!(cargo_compile_with_invalid_code {
    let p = project("foo")
        .file("Cargo.toml", basic_bin_manifest("foo").as_slice())
        .file("src/foo.rs", "invalid rust code!");

    assert_that(p.cargo_process("build"),
                execs()
                .with_status(101)
                // The 13 leading spaces align the caret under `invalid`
                // after the "src/foo.rs:1 " prefix on the previous line.
                .with_stderr(format!("\
{filename}:1:1: 1:8 error: expected item[..]found `invalid`
{filename}:1 invalid rust code!
             ^~~~~~~
Could not compile `foo`.

To learn more, run the command again with --verbose.\n",
            filename = format!("src{}foo.rs", path::SEP)).as_slice()));
    // A lockfile is produced even when compilation fails.
    assert_that(&p.root().join("Cargo.lock"), existing_file());
})
|
|
|
|
|
2014-07-11 20:32:27 +00:00
|
|
|
// Broken path dependencies must cause the parent build to fail with
// status 101; the parent's own sources are invalid as well, so the failure
// can come from any of the three packages.
test!(cargo_compile_with_invalid_code_in_deps {
    let foo = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "foo"
            version = "0.0.1"
            authors = []

            [dependencies.bar]
            path = "../bar"
            [dependencies.baz]
            path = "../baz"
        "#)
        .file("src/main.rs", "invalid rust code!");

    // Both dependencies exist on disk but contain invalid Rust.
    let bar = project("bar")
        .file("Cargo.toml", basic_bin_manifest("bar").as_slice())
        .file("src/lib.rs", "invalid rust code!");
    let baz = project("baz")
        .file("Cargo.toml", basic_bin_manifest("baz").as_slice())
        .file("src/lib.rs", "invalid rust code!");
    bar.build();
    baz.build();

    assert_that(foo.cargo_process("build"), execs().with_status(101));
})
|
|
|
|
|
2014-05-13 00:33:13 +00:00
|
|
|
// A dead-code warning in the root package is passed through on stderr, but
// the build is still considered successful (no status assertion).
//
// Fix: as in cargo_compile_with_invalid_code, the `{filename}` placeholders
// in the expected stderr were garbled to the literal `(unknown)`, orphaning
// the `filename = …` named argument; they are restored here.
// NOTE(review): the 26 leading spaces place the caret under `fn dead() {}`
// after the "src/foo.rs:1 " prefix — confirm against actual rustc output.
test!(cargo_compile_with_warnings_in_the_root_package {
    let p = project("foo")
        .file("Cargo.toml", basic_bin_manifest("foo").as_slice())
        // `dead` is intentionally unused to trigger #[warn(dead_code)].
        .file("src/foo.rs", "fn main() {} fn dead() {}");

    assert_that(p.cargo_process("build"),
                execs()
                // `{{` / `}}` are escaped braces in the format string and
                // render as literal `{}` in the expected diagnostic.
                .with_stderr(format!("\
{filename}:1:14: 1:26 warning: function is never used: `dead`, #[warn(dead_code)] \
on by default
{filename}:1 fn main() {{}} fn dead() {{}}
                          ^~~~~~~~~~~~
", filename = format!("src{}foo.rs", path::SEP).as_slice())));
})
|
|
|
|
|
|
|
|
// Warnings emitted while compiling a *dependency* still stream through to
// cargo's stderr, but must not fail the build: both packages compile, the
// root binary is produced, and it runs successfully.
test!(cargo_compile_with_warnings_in_a_dep_package {
    let mut p = project("foo");

    p = p
        .file("Cargo.toml", r#"
            [project]

            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [dependencies.bar]
            path = "bar"

            [[bin]]

            name = "foo"
        "#)
        .file("src/foo.rs",
              main_file(r#""{}", bar::gimme()"#, &["bar"]).as_slice())
        .file("bar/Cargo.toml", r#"
            [project]

            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [lib]

            name = "bar"
        "#)
        // `dead` is intentionally unused so rustc emits a dead_code warning
        // while compiling the dependency.
        .file("bar/src/bar.rs", r#"
            pub fn gimme() -> String {
                "test passed".to_string()
            }

            fn dead() {}
        "#);

    assert_that(p.cargo_process("build"),
                execs()
                // Stdout reports both packages in dependency order, tagged
                // with the project's url.
                .with_stdout(format!("{} bar v0.5.0 ({})\n\
                                      {} foo v0.5.0 ({})\n",
                                     COMPILING, p.url(),
                                     COMPILING, p.url()))
                // `[..]` wildcards keep the match independent of the exact
                // file path and caret alignment in rustc's diagnostic.
                .with_stderr("\
[..]warning: function is never used: `dead`[..]
[..]fn dead() {}
[..]^~~~~~~~~~~~
"));

    // The build succeeded despite the warning...
    assert_that(&p.bin("foo"), existing_file());

    // ...and the binary calls through to the dependency at runtime.
    assert_that(
      process(p.bin("foo")).unwrap(),
      execs().with_stdout("test passed\n"));
})
|
2014-05-09 00:50:28 +00:00
|
|
|
|
2014-07-08 00:59:18 +00:00
|
|
|
test!(cargo_compile_with_nested_deps_inferred {
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
let p = project("foo")
|
2014-07-08 00:59:18 +00:00
|
|
|
.file("Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "foo"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
[dependencies.bar]
|
|
|
|
path = 'bar'
|
2014-07-08 00:59:18 +00:00
|
|
|
|
|
|
|
[[bin]]
|
|
|
|
name = "foo"
|
|
|
|
"#)
|
|
|
|
.file("src/foo.rs",
|
2014-11-22 10:04:40 +00:00
|
|
|
main_file(r#""{}", bar::gimme()"#, &["bar"]).as_slice())
|
2014-07-08 00:59:18 +00:00
|
|
|
.file("bar/Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "bar"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
[dependencies.baz]
|
|
|
|
path = "../baz"
|
2014-07-08 00:59:18 +00:00
|
|
|
"#)
|
|
|
|
.file("bar/src/lib.rs", r#"
|
|
|
|
extern crate baz;
|
|
|
|
|
|
|
|
pub fn gimme() -> String {
|
|
|
|
baz::gimme()
|
|
|
|
}
|
|
|
|
"#)
|
|
|
|
.file("baz/Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "baz"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
"#)
|
|
|
|
.file("baz/src/lib.rs", r#"
|
|
|
|
pub fn gimme() -> String {
|
2014-07-09 13:38:10 +00:00
|
|
|
"test passed".to_string()
|
2014-07-08 00:59:18 +00:00
|
|
|
}
|
|
|
|
"#);
|
|
|
|
|
2014-08-21 16:24:34 +00:00
|
|
|
p.cargo_process("build")
|
2014-07-08 00:59:18 +00:00
|
|
|
.exec_with_output()
|
|
|
|
.assert();
|
|
|
|
|
|
|
|
assert_that(&p.bin("foo"), existing_file());
|
|
|
|
|
|
|
|
assert_that(
|
2014-11-22 10:04:40 +00:00
|
|
|
process(p.bin("foo")).unwrap(),
|
2014-07-08 00:59:18 +00:00
|
|
|
execs().with_stdout("test passed\n"));
|
|
|
|
})
|
|
|
|
|
|
|
|
test!(cargo_compile_with_nested_deps_correct_bin {
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
let p = project("foo")
|
2014-07-08 00:59:18 +00:00
|
|
|
.file("Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "foo"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
[dependencies.bar]
|
|
|
|
path = "bar"
|
2014-07-08 00:59:18 +00:00
|
|
|
|
|
|
|
[[bin]]
|
|
|
|
name = "foo"
|
|
|
|
"#)
|
|
|
|
.file("src/main.rs",
|
2014-11-22 10:04:40 +00:00
|
|
|
main_file(r#""{}", bar::gimme()"#, &["bar"]).as_slice())
|
2014-07-08 00:59:18 +00:00
|
|
|
.file("bar/Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "bar"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
[dependencies.baz]
|
|
|
|
path = "../baz"
|
2014-07-08 00:59:18 +00:00
|
|
|
"#)
|
|
|
|
.file("bar/src/lib.rs", r#"
|
|
|
|
extern crate baz;
|
|
|
|
|
|
|
|
pub fn gimme() -> String {
|
|
|
|
baz::gimme()
|
|
|
|
}
|
|
|
|
"#)
|
|
|
|
.file("baz/Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "baz"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
"#)
|
|
|
|
.file("baz/src/lib.rs", r#"
|
|
|
|
pub fn gimme() -> String {
|
2014-07-09 13:38:10 +00:00
|
|
|
"test passed".to_string()
|
2014-07-08 00:59:18 +00:00
|
|
|
}
|
|
|
|
"#);
|
|
|
|
|
2014-08-21 16:24:34 +00:00
|
|
|
p.cargo_process("build")
|
2014-07-08 00:59:18 +00:00
|
|
|
.exec_with_output()
|
|
|
|
.assert();
|
|
|
|
|
|
|
|
assert_that(&p.bin("foo"), existing_file());
|
|
|
|
|
|
|
|
assert_that(
|
2014-11-22 10:04:40 +00:00
|
|
|
process(p.bin("foo")).unwrap(),
|
2014-07-08 00:59:18 +00:00
|
|
|
execs().with_stdout("test passed\n"));
|
|
|
|
})
|
|
|
|
|
2014-06-11 21:50:54 +00:00
|
|
|
test!(cargo_compile_with_nested_deps_shorthand {
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
let p = project("foo")
|
2014-05-08 23:49:58 +00:00
|
|
|
.file("Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "foo"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
[dependencies.bar]
|
|
|
|
path = "bar"
|
2014-05-08 23:49:58 +00:00
|
|
|
|
|
|
|
[[bin]]
|
|
|
|
|
|
|
|
name = "foo"
|
|
|
|
"#)
|
2014-06-19 23:45:19 +00:00
|
|
|
.file("src/foo.rs",
|
2014-11-22 10:04:40 +00:00
|
|
|
main_file(r#""{}", bar::gimme()"#, &["bar"]).as_slice())
|
2014-05-08 23:49:58 +00:00
|
|
|
.file("bar/Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "bar"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
[dependencies.baz]
|
|
|
|
path = "../baz"
|
2014-05-08 23:49:58 +00:00
|
|
|
|
2014-08-14 06:08:02 +00:00
|
|
|
[lib]
|
2014-05-08 23:49:58 +00:00
|
|
|
|
|
|
|
name = "bar"
|
|
|
|
"#)
|
|
|
|
.file("bar/src/bar.rs", r#"
|
|
|
|
extern crate baz;
|
|
|
|
|
2014-05-27 23:14:34 +00:00
|
|
|
pub fn gimme() -> String {
|
2014-05-08 23:49:58 +00:00
|
|
|
baz::gimme()
|
|
|
|
}
|
|
|
|
"#)
|
|
|
|
.file("baz/Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "baz"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
2014-08-14 06:08:02 +00:00
|
|
|
[lib]
|
2014-05-08 23:49:58 +00:00
|
|
|
|
|
|
|
name = "baz"
|
|
|
|
"#)
|
|
|
|
.file("baz/src/baz.rs", r#"
|
2014-05-27 23:14:34 +00:00
|
|
|
pub fn gimme() -> String {
|
2014-07-09 13:38:10 +00:00
|
|
|
"test passed".to_string()
|
2014-05-08 23:49:58 +00:00
|
|
|
}
|
2014-05-09 00:50:28 +00:00
|
|
|
"#);
|
2014-05-08 23:49:58 +00:00
|
|
|
|
2014-08-21 16:24:34 +00:00
|
|
|
p.cargo_process("build")
|
2014-05-08 23:49:58 +00:00
|
|
|
.exec_with_output()
|
|
|
|
.assert();
|
|
|
|
|
2014-06-25 05:06:11 +00:00
|
|
|
assert_that(&p.bin("foo"), existing_file());
|
2014-05-08 23:49:58 +00:00
|
|
|
|
|
|
|
assert_that(
|
2014-11-22 10:04:40 +00:00
|
|
|
process(p.bin("foo")).unwrap(),
|
2014-05-08 23:49:58 +00:00
|
|
|
execs().with_stdout("test passed\n"));
|
|
|
|
})
|
|
|
|
|
2014-06-10 00:51:53 +00:00
|
|
|
test!(cargo_compile_with_nested_deps_longhand {
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
let p = project("foo")
|
2014-06-10 00:51:53 +00:00
|
|
|
.file("Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "foo"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
[dependencies.bar]
|
|
|
|
path = "bar"
|
|
|
|
version = "0.5.0"
|
2014-06-10 00:51:53 +00:00
|
|
|
|
|
|
|
[[bin]]
|
|
|
|
|
|
|
|
name = "foo"
|
|
|
|
"#)
|
2014-06-19 23:45:19 +00:00
|
|
|
.file("src/foo.rs",
|
2014-11-22 10:04:40 +00:00
|
|
|
main_file(r#""{}", bar::gimme()"#, &["bar"]).as_slice())
|
2014-06-10 00:51:53 +00:00
|
|
|
.file("bar/Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "bar"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
|
|
|
[dependencies.baz]
|
Implement a registry source
# cargo upload
The cargo-upload command will take the local package and upload it to the
specified registry. The local package is uploaded as a tarball compressed with
gzip under maximum compression. Most of this is done by just delegating to
`cargo package` The host to upload to is specified, in order of priority, by a
command line `--host` flag, the `registry.host` config key, and then the default
registry. The default registry is still `example.com`
The registry itself is still a work in progress, but the general plumbing for a
command such as this would look like:
1. Ensure the local package has been compressed into an archive.
2. Fetch the relevant registry and login token from config files.
3. Ensure all dependencies for a package are listed as coming from the same
registry.
4. Upload the archive to the registry with the login token.
5. The registry will verify the package is under 2MB (configurable).
6. The registry will upload the archive to S3, calculating a checksum in the
process.
7. The registry will add an entry to the registry's index (a git repository).
The entry will include the name of the package, the version uploaded, the
checksum of the upload, and then the list of dependencies (name/version req)
8. The local `cargo upload` command will succeed.
# cargo login
Uploading requires a token from the api server, and this token follows the same
config chain for the host except that there is no fallback. To implement login,
the `cargo login` command is used. With 0 arguments, the command will request
that a site be visited for a login token, and with an argument it will set the
argument as the new login token.
The `util::config` module was modified to allow writing configuration as well as
reading it. The support is a little lacking in that comments are blown away, but
the support is there at least.
# RegistrySource
An implementation of `RegistrySource` has been created (deleting the old
`DummyRegistrySource`). This implementation only needs a URL to be constructed,
and it is assumed that the URL is running an instance of the cargo registry.
## RegistrySource::update
Currently this will unconditionally update the registry's index (a git
repository). Tuning is necessary to prevent updating the index each time (more
coming soon).
## RegistrySource::query
This is called in the resolve phase of cargo. This function is given a
dependency to query for, and the source will simply look into the index to see
if any package with the name is present. If found, the package's index file will
be loaded and parsed into a list of summaries.
The main optimization of this function is to not require the entire registry to
ever be resident in memory. Instead, only necessary packages are loaded into
memory and parsed.
## RegistrySource::download
This is also called during the resolve phase of cargo, but only when a package
has been selected to be built (actually resolved). This phase of the source will
actually download and unpack the tarball for the package.
Currently a configuration file is located in the root of a registry's index
describing the root url to download packages from.
This function is optimized for two different metrics:
1. If a tarball is downloaded, it is not downloaded again. It is assumed that
once a tarball is successfully downloaded it will never change.
2. If the unpacking destination has a `.cargo-ok` file, it is assumed that the
unpacking has already occurred and does not need to happen again.
With these in place, a rebuild should take almost no time at all.
## RegistrySource::get
This function is simply implemented in terms of a PathSource's `get` function by
creating a `PathSource` for all unpacked tarballs as part of the `download`
stage.
## Filesystem layout
There are a few new directories as part of the `.cargo` home folder:
* `.cargo/registry/index/$hostname-$hash` - This is the directory containing the
actual index of the registry. `$hostname` comes from its url, and `$hash` is
the hash of the entire url.
* `.cargo/registry/cache/$hostname-$hash/$pkg-$vers.tar.gz` - This is a
directory used to cache the downloads of packages from the registry.
* `.cargo/registry/src/$hostname-$hash/$pkg-$vers` - This is the location of the
unpacked packages. They will be compiled from this location.
# New Dependencies
Cargo has picked up a new dependency on the `curl-rust` package in order to send
HTTP requests to the registry as well as send HTTP requests to download
tarballs.
2014-07-18 15:40:45 +00:00
|
|
|
path = "../baz"
|
2014-06-10 00:51:53 +00:00
|
|
|
version = "0.5.0"
|
|
|
|
|
2014-08-14 06:08:02 +00:00
|
|
|
[lib]
|
2014-06-10 00:51:53 +00:00
|
|
|
|
|
|
|
name = "bar"
|
|
|
|
"#)
|
|
|
|
.file("bar/src/bar.rs", r#"
|
|
|
|
extern crate baz;
|
|
|
|
|
|
|
|
pub fn gimme() -> String {
|
|
|
|
baz::gimme()
|
|
|
|
}
|
|
|
|
"#)
|
|
|
|
.file("baz/Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "baz"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
2014-08-14 06:08:02 +00:00
|
|
|
[lib]
|
2014-06-10 00:51:53 +00:00
|
|
|
|
|
|
|
name = "baz"
|
|
|
|
"#)
|
|
|
|
.file("baz/src/baz.rs", r#"
|
|
|
|
pub fn gimme() -> String {
|
2014-07-09 13:38:10 +00:00
|
|
|
"test passed".to_string()
|
2014-06-10 00:51:53 +00:00
|
|
|
}
|
|
|
|
"#);
|
|
|
|
|
2014-08-21 16:24:34 +00:00
|
|
|
assert_that(p.cargo_process("build"), execs());
|
2014-06-10 00:51:53 +00:00
|
|
|
|
2014-06-25 05:06:11 +00:00
|
|
|
assert_that(&p.bin("foo"), existing_file());
|
2014-06-10 00:51:53 +00:00
|
|
|
|
|
|
|
assert_that(
|
2014-11-22 10:04:40 +00:00
|
|
|
process(p.bin("foo")).unwrap(),
|
2014-06-10 00:51:53 +00:00
|
|
|
execs().with_stdout("test passed\n"));
|
|
|
|
})
|
|
|
|
|
2014-07-24 11:16:20 +00:00
|
|
|
// Check that Cargo gives a sensible error if a dependency can't be found
|
|
|
|
// because of a name mismatch.
|
|
|
|
test!(cargo_compile_with_dep_name_mismatch {
|
|
|
|
let p = project("foo")
|
|
|
|
.file("Cargo.toml", r#"
|
|
|
|
[package]
|
|
|
|
|
|
|
|
name = "foo"
|
|
|
|
version = "0.0.1"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
|
|
|
[[bin]]
|
|
|
|
|
|
|
|
name = "foo"
|
|
|
|
|
|
|
|
[dependencies.notquitebar]
|
|
|
|
|
|
|
|
path = "bar"
|
|
|
|
"#)
|
2014-11-22 10:04:40 +00:00
|
|
|
.file("src/foo.rs", main_file(r#""i am foo""#, &["bar"]).as_slice())
|
2014-07-24 11:16:20 +00:00
|
|
|
.file("bar/Cargo.toml", basic_bin_manifest("bar").as_slice())
|
2014-11-22 10:04:40 +00:00
|
|
|
.file("bar/src/bar.rs", main_file(r#""i am bar""#, &[]).as_slice());
|
2014-07-24 11:16:20 +00:00
|
|
|
|
2014-08-21 16:24:34 +00:00
|
|
|
assert_that(p.cargo_process("build"),
|
2014-07-24 11:16:20 +00:00
|
|
|
execs().with_status(101).with_stderr(format!(
|
2014-10-17 15:17:17 +00:00
|
|
|
r#"no package named `notquitebar` found (required by `foo`)
|
|
|
|
location searched: {proj_dir}
|
|
|
|
version required: *
|
2014-08-06 16:21:10 +00:00
|
|
|
"#, proj_dir = p.url())));
|
2014-07-24 11:16:20 +00:00
|
|
|
})
|
|
|
|
|
2014-03-19 01:10:48 +00:00
|
|
|
// test!(compiling_project_with_invalid_manifest)
|
2014-06-18 20:09:19 +00:00
|
|
|
|
2014-07-24 00:57:49 +00:00
|
|
|
test!(crate_version_env_vars {
|
|
|
|
let p = project("foo")
|
|
|
|
.file("Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
name = "foo"
|
|
|
|
version = "0.5.1-alpha.1"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
"#)
|
2014-08-07 15:42:16 +00:00
|
|
|
.file("src/main.rs", r#"
|
|
|
|
extern crate foo;
|
2014-07-24 00:57:49 +00:00
|
|
|
|
|
|
|
static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR");
|
|
|
|
static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR");
|
|
|
|
static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH");
|
|
|
|
static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE");
|
2014-08-25 12:40:47 +00:00
|
|
|
static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR");
|
2014-07-24 00:57:49 +00:00
|
|
|
|
|
|
|
fn main() {
|
2014-08-25 12:40:47 +00:00
|
|
|
let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR,
|
|
|
|
VERSION_MINOR, VERSION_PATCH, VERSION_PRE,
|
|
|
|
CARGO_MANIFEST_DIR);
|
2014-08-07 15:42:16 +00:00
|
|
|
assert_eq!(s, foo::version());
|
|
|
|
println!("{}", s);
|
|
|
|
}
|
|
|
|
"#)
|
|
|
|
.file("src/lib.rs", r#"
|
|
|
|
pub fn version() -> String {
|
2014-08-25 12:40:47 +00:00
|
|
|
format!("{}-{}-{} @ {} in {}",
|
2014-08-07 15:42:16 +00:00
|
|
|
env!("CARGO_PKG_VERSION_MAJOR"),
|
|
|
|
env!("CARGO_PKG_VERSION_MINOR"),
|
|
|
|
env!("CARGO_PKG_VERSION_PATCH"),
|
2014-08-25 12:40:47 +00:00
|
|
|
env!("CARGO_PKG_VERSION_PRE"),
|
|
|
|
env!("CARGO_MANIFEST_DIR"))
|
2014-07-24 00:57:49 +00:00
|
|
|
}
|
|
|
|
"#);
|
|
|
|
|
2014-08-21 16:24:34 +00:00
|
|
|
assert_that(p.cargo_process("build"), execs().with_status(0));
|
2014-07-24 00:57:49 +00:00
|
|
|
|
|
|
|
assert_that(
|
2014-11-22 10:04:40 +00:00
|
|
|
process(p.bin("foo")).unwrap(),
|
2014-08-25 12:40:47 +00:00
|
|
|
execs().with_stdout(format!("0-5-1 @ alpha.1 in {}\n",
|
|
|
|
p.root().display()).as_slice()));
|
2014-08-07 15:42:16 +00:00
|
|
|
|
2014-08-21 16:24:34 +00:00
|
|
|
assert_that(p.process(cargo_dir().join("cargo")).arg("test"),
|
|
|
|
execs().with_status(0));
|
2014-07-24 00:57:49 +00:00
|
|
|
})
|
|
|
|
|
2014-07-08 00:59:18 +00:00
|
|
|
// this is testing that src/<pkg-name>.rs still works (for now)
|
|
|
|
test!(many_crate_types_old_style_lib_location {
|
2014-06-19 22:21:00 +00:00
|
|
|
let mut p = project("foo");
|
|
|
|
p = p
|
|
|
|
.file("Cargo.toml", r#"
|
|
|
|
[project]
|
|
|
|
|
|
|
|
name = "foo"
|
|
|
|
version = "0.5.0"
|
|
|
|
authors = ["wycats@example.com"]
|
|
|
|
|
2014-08-14 06:08:02 +00:00
|
|
|
[lib]
|
2014-06-19 22:21:00 +00:00
|
|
|
|
|
|
|
name = "foo"
|
|
|
|
crate_type = ["rlib", "dylib"]
|
|
|
|
"#)
|
|
|
|
.file("src/foo.rs", r#"
|
|
|
|
pub fn foo() {}
|
|
|
|
"#);
|
2014-08-21 16:24:34 +00:00
|
|
|
assert_that(p.cargo_process("build"),
|
2014-06-19 22:21:00 +00:00
|
|
|
execs().with_status(0));
|
|
|
|
|
|
|
|
let files = fs::readdir(&p.root().join("target")).assert();
|
|
|
|
let mut files: Vec<String> = files.iter().filter_map(|f| {
|
2014-07-08 00:59:18 +00:00
|
|
|
match f.filename_str().unwrap() {
|
2014-10-31 22:51:13 +00:00
|
|
|
"build" | "examples" | "deps" => None,
|
2014-07-08 00:59:18 +00:00
|
|
|
s if s.contains("fingerprint") || s.contains("dSYM") => None,
|
2014-07-09 13:38:10 +00:00
|
|
|
s => Some(s.to_string())
|
2014-07-08 00:59:18 +00:00
|
|
|
}
|
|
|
|
}).collect();
|
|
|
|
files.sort();
|
2014-07-17 20:20:13 +00:00
|
|
|
let file0 = files[0].as_slice();
|
|
|
|
let file1 = files[1].as_slice();
|
2014-07-08 00:59:18 +00:00
|
|
|
println!("{} {}", file0, file1);
|
|
|
|
assert!(file0.ends_with(".rlib") || file1.ends_with(".rlib"));
|
|
|
|
assert!(file0.ends_with(os::consts::DLL_SUFFIX) ||
|
|
|
|
file1.ends_with(os::consts::DLL_SUFFIX));
|
|
|
|
})
|
|
|
|
|
|
|
|
test!(many_crate_types_correct {
    // Same as many_crate_types_old_style_lib_location, but with the
    // library source in the canonical src/lib.rs location.
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [project]

            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [lib]

            name = "foo"
            crate_type = ["rlib", "dylib"]
        "#)
        .file("src/lib.rs", r#"
            pub fn foo() {}
        "#);
    assert_that(p.cargo_process("build"),
                execs().with_status(0));

    // Collect artifact names in target/, skipping cargo's bookkeeping
    // directories and fingerprint/dSYM files.
    let files = fs::readdir(&p.root().join("target")).assert();
    let mut files: Vec<String> = files.iter().filter_map(|f| {
        match f.filename_str().unwrap() {
            "build" | "examples" | "deps" => None,
            s if s.contains("fingerprint") || s.contains("dSYM") => None,
            s => Some(s.to_string())
        }
    }).collect();
    files.sort();
    let file0 = files[0].as_slice();
    let file1 = files[1].as_slice();
    println!("{} {}", file0, file1);
    // Exactly one artifact should be the rlib and one the platform dylib.
    assert!(file0.ends_with(".rlib") || file1.ends_with(".rlib"));
    assert!(file0.ends_with(os::consts::DLL_SUFFIX) ||
            file1.ends_with(os::consts::DLL_SUFFIX));
})
|
2014-06-27 05:53:05 +00:00
|
|
|
|
|
|
|
test!(unused_keys {
    // Unrecognized manifest keys should produce a warning on stderr while
    // the build still succeeds. "bulid" is a deliberate misspelling of
    // "build" — the typo IS the test data; do not "fix" it.
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [project]

            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]
            bulid = "foo"

            [lib]

            name = "foo"
        "#)
        .file("src/foo.rs", r#"
            pub fn foo() {}
        "#);
    assert_that(p.cargo_process("build"),
                execs().with_status(0)
                       .with_stderr("unused manifest key: project.bulid\n"));

    // A valid key name in the wrong section ([lib] does not accept
    // "build") is also reported as unused.
    let mut p = project("bar");
    p = p
        .file("Cargo.toml", r#"
            [project]

            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [lib]

            name = "foo"
            build = "foo"
        "#)
        .file("src/foo.rs", r#"
            pub fn foo() {}
        "#);
    assert_that(p.cargo_process("build"),
                execs().with_status(0)
                       .with_stderr("unused manifest key: lib.build\n"));
})
|
2014-06-28 20:33:29 +00:00
|
|
|
|
|
|
|
test!(self_dependency {
    // A package that lists itself as a path dependency (path = ".")
    // should still build successfully rather than looping forever.
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [package]

            name = "test"
            version = "0.0.0"
            authors = []

            [dependencies.test]

            path = "."

            [lib]

            name = "test"
        "#)
        .file("src/test.rs", "fn main() {}");
    assert_that(p.cargo_process("build"),
                execs().with_status(0));
})
|
2014-07-07 21:46:03 +00:00
|
|
|
|
2014-07-08 01:07:23 +00:00
|
|
|
// Symlinks are unix-only in this test harness, hence the cfg gate.
#[cfg(not(windows))]
test!(ignore_broken_symlinks {
    // A dangling symlink in the project root must not break the build.
    let p = project("foo")
        .file("Cargo.toml", basic_bin_manifest("foo").as_slice())
        .file("src/foo.rs", main_file(r#""i am foo""#, &[]).as_slice())
        .symlink("Notafile", "bar");

    assert_that(p.cargo_process("build"), execs());
    assert_that(&p.bin("foo"), existing_file());

    // The produced binary still runs and prints its message.
    assert_that(
      process(p.bin("foo")).unwrap(),
      execs().with_stdout("i am foo\n"));
})
|
2014-07-06 00:10:23 +00:00
|
|
|
|
|
|
|
test!(missing_lib_and_bin {
    // A manifest with no [lib] and no [[bin]] section (and no inferred
    // src/lib.rs or src/main.rs) is a hard error (exit status 101).
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [package]

            name = "test"
            version = "0.0.0"
            authors = []
        "#);
    assert_that(p.cargo_process("build"),
                execs().with_status(101)
                       .with_stderr("either a [lib] or [[bin]] section \
must be present\n"));
})
|
2014-07-09 20:55:00 +00:00
|
|
|
|
2014-11-13 02:20:44 +00:00
|
|
|
test!(lto_build {
    // `lto = true` in [profile.release] must translate into a `-C lto`
    // flag on the rustc invocation for a --release build.
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [package]

            name = "test"
            version = "0.0.0"
            authors = []

            [profile.release]
            lto = true
        "#)
        .file("src/main.rs", "fn main() {}");
    // -v makes cargo echo the exact rustc command line so it can be
    // matched below; [..] is the harness's wildcard matcher.
    assert_that(p.cargo_process("build").arg("-v").arg("--release"),
                execs().with_status(0).with_stdout(format!("\
{compiling} test v0.0.0 ({url})
{running} `rustc {dir}{sep}src{sep}main.rs --crate-name test --crate-type bin \
        --opt-level 3 \
        -C lto \
        --cfg ndebug \
        --out-dir {dir}{sep}target{sep}release \
        --dep-info [..] \
        -L {dir}{sep}target{sep}release \
        -L {dir}{sep}target{sep}release{sep}deps`
",
                running = RUNNING, compiling = COMPILING, sep = path::SEP,
                dir = p.root().display(),
                url = p.url(),
                )));
})
|
|
|
|
|
2014-07-09 20:55:00 +00:00
|
|
|
test!(verbose_build {
    // `cargo build -v` must echo the full rustc command line for a
    // plain debug build of a library-only package.
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [package]

            name = "test"
            version = "0.0.0"
            authors = []
        "#)
        .file("src/lib.rs", "");
    // [..] is the harness wildcard for the hash-dependent parts
    // (metadata, extra-filename, dep-info path).
    assert_that(p.cargo_process("build").arg("-v"),
                execs().with_status(0).with_stdout(format!("\
{compiling} test v0.0.0 ({url})
{running} `rustc {dir}{sep}src{sep}lib.rs --crate-name test --crate-type lib -g \
        -C metadata=[..] \
        -C extra-filename=-[..] \
        --out-dir {dir}{sep}target \
        --dep-info [..] \
        -L {dir}{sep}target \
        -L {dir}{sep}target{sep}deps`
",
                running = RUNNING, compiling = COMPILING, sep = path::SEP,
                dir = p.root().display(),
                url = p.url(),
                )));
})
|
|
|
|
|
|
|
|
test!(verbose_release_build {
    // Like verbose_build, but with --release: the echoed command must
    // use --opt-level 3, --cfg ndebug, and the target/release dirs.
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [package]

            name = "test"
            version = "0.0.0"
            authors = []
        "#)
        .file("src/lib.rs", "");
    assert_that(p.cargo_process("build").arg("-v").arg("--release"),
                execs().with_status(0).with_stdout(format!("\
{compiling} test v0.0.0 ({url})
{running} `rustc {dir}{sep}src{sep}lib.rs --crate-name test --crate-type lib \
        --opt-level 3 \
        --cfg ndebug \
        -C metadata=[..] \
        -C extra-filename=-[..] \
        --out-dir {dir}{sep}target{sep}release \
        --dep-info [..] \
        -L {dir}{sep}target{sep}release \
        -L {dir}{sep}target{sep}release{sep}deps`
",
                running = RUNNING, compiling = COMPILING, sep = path::SEP,
                dir = p.root().display(),
                url = p.url(),
                )));
})
|
|
|
|
|
|
|
|
test!(verbose_release_build_deps {
    // A release build with a path dependency: the dependency (a
    // dylib+rlib) is compiled into target/release/deps first, then the
    // top crate links it via two --extern flags (dylib and rlib).
    let mut p = project("foo");
    p = p
        .file("Cargo.toml", r#"
            [package]

            name = "test"
            version = "0.0.0"
            authors = []

            [dependencies.foo]
            path = "foo"
        "#)
        .file("src/lib.rs", "")
        .file("foo/Cargo.toml", r#"
            [package]

            name = "foo"
            version = "0.0.0"
            authors = []

            [lib]
            name = "foo"
            crate_type = ["dylib", "rlib"]
        "#)
        .file("foo/src/lib.rs", "");
    // DLL_PREFIX/DLL_SUFFIX make the dylib filename assertion portable
    // across platforms (lib*.so / lib*.dylib / *.dll).
    assert_that(p.cargo_process("build").arg("-v").arg("--release"),
                execs().with_status(0).with_stdout(format!("\
{compiling} foo v0.0.0 ({url})
{running} `rustc {dir}{sep}foo{sep}src{sep}lib.rs --crate-name foo \
        --crate-type dylib --crate-type rlib -C prefer-dynamic \
        --opt-level 3 \
        --cfg ndebug \
        -C metadata=[..] \
        -C extra-filename=-[..] \
        --out-dir {dir}{sep}target{sep}release{sep}deps \
        --dep-info [..] \
        -L {dir}{sep}target{sep}release{sep}deps \
        -L {dir}{sep}target{sep}release{sep}deps`
{compiling} test v0.0.0 ({url})
{running} `rustc {dir}{sep}src{sep}lib.rs --crate-name test --crate-type lib \
        --opt-level 3 \
        --cfg ndebug \
        -C metadata=[..] \
        -C extra-filename=-[..] \
        --out-dir {dir}{sep}target{sep}release \
        --dep-info [..] \
        -L {dir}{sep}target{sep}release \
        -L {dir}{sep}target{sep}release{sep}deps \
        --extern foo={dir}{sep}target{sep}release{sep}deps/\
        {prefix}foo-[..]{suffix} \
        --extern foo={dir}{sep}target{sep}release{sep}deps/libfoo-[..].rlib`
",
                running = RUNNING,
                compiling = COMPILING,
                dir = p.root().display(),
                url = p.url(),
                sep = path::SEP,
                prefix = os::consts::DLL_PREFIX,
                suffix = os::consts::DLL_SUFFIX).as_slice()));
})
|
2014-07-10 22:13:53 +00:00
|
|
|
|
|
|
|
test!(explicit_examples {
    // [[example]] sections with explicit `path` keys: the example
    // binaries are built from the given paths but named after the
    // `name` key (hello/goodbye, not ex-hello/ex-goodbye).
    let mut p = project("world");
    p = p.file("Cargo.toml", r#"
            [package]
            name = "world"
            version = "1.0.0"
            authors = []

            [lib]
            name = "world"
            path = "src/lib.rs"

            [[example]]
            name = "hello"
            path = "examples/ex-hello.rs"

            [[example]]
            name = "goodbye"
            path = "examples/ex-goodbye.rs"
        "#)
        .file("src/lib.rs", r#"
            pub fn get_hello() -> &'static str { "Hello" }
            pub fn get_goodbye() -> &'static str { "Goodbye" }
            pub fn get_world() -> &'static str { "World" }
        "#)
        .file("examples/ex-hello.rs", r#"
            extern crate world;
            fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); }
        "#)
        .file("examples/ex-goodbye.rs", r#"
            extern crate world;
            fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); }
        "#);

    // `cargo test` builds examples as part of its run.
    assert_that(p.cargo_process("test"), execs());
    assert_that(process(p.bin("examples/hello")).unwrap(),
                execs().with_stdout("Hello, World!\n"));
    assert_that(process(p.bin("examples/goodbye")).unwrap(),
                execs().with_stdout("Goodbye, World!\n"));
})
|
|
|
|
|
|
|
|
test!(implicit_examples {
    // No [[example]] sections: files under examples/ are discovered and
    // built automatically, named after their file stems.
    let mut p = project("world");
    p = p.file("Cargo.toml", r#"
            [package]
            name = "world"
            version = "1.0.0"
            authors = []
        "#)
        .file("src/lib.rs", r#"
            pub fn get_hello() -> &'static str { "Hello" }
            pub fn get_goodbye() -> &'static str { "Goodbye" }
            pub fn get_world() -> &'static str { "World" }
        "#)
        .file("examples/hello.rs", r#"
            extern crate world;
            fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); }
        "#)
        .file("examples/goodbye.rs", r#"
            extern crate world;
            fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); }
        "#);

    assert_that(p.cargo_process("test"), execs().with_status(0));
    assert_that(process(p.bin("examples/hello")).unwrap(),
                execs().with_stdout("Hello, World!\n"));
    assert_that(process(p.bin("examples/goodbye")).unwrap(),
                execs().with_stdout("Goodbye, World!\n"));
})
|
2014-07-12 22:30:24 +00:00
|
|
|
|
|
|
|
test!(standard_build_no_ndebug {
    // A default (debug) build must NOT set --cfg ndebug, so the
    // cfg!(ndebug) branch compiles out and the binary prints "slow".
    let p = project("world")
        .file("Cargo.toml", basic_bin_manifest("foo"))
        .file("src/foo.rs", r#"
            fn main() {
                if cfg!(ndebug) {
                    println!("fast")
                } else {
                    println!("slow")
                }
            }
        "#);

    assert_that(p.cargo_process("build"), execs().with_status(0));
    assert_that(process(p.bin("foo")).unwrap(), execs().with_stdout("slow\n"));
})
|
|
|
|
|
|
|
|
test!(release_build_ndebug {
    // Mirror of standard_build_no_ndebug: a --release build DOES set
    // --cfg ndebug, so the binary (in target/release) prints "fast".
    let p = project("world")
        .file("Cargo.toml", basic_bin_manifest("foo"))
        .file("src/foo.rs", r#"
            fn main() {
                if cfg!(ndebug) {
                    println!("fast")
                } else {
                    println!("slow")
                }
            }
        "#);

    assert_that(p.cargo_process("build").arg("--release"),
                execs().with_status(0));
    assert_that(process(p.bin("release/foo")).unwrap(), execs().with_stdout("fast\n"));
})
|
2014-07-17 01:44:30 +00:00
|
|
|
|
|
|
|
test!(inferred_main_bin {
    // With no [[bin]] section, src/main.rs is inferred as a binary
    // target named after the package.
    let p = project("world")
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.0.1"
            authors = []
        "#)
        .file("src/main.rs", r#"
            fn main() {}
        "#);

    assert_that(p.cargo_process("build"), execs().with_status(0));
    assert_that(process(p.bin("foo")).unwrap(), execs().with_status(0));
})
|
2014-07-18 02:53:47 +00:00
|
|
|
|
|
|
|
test!(deletion_causes_failure {
    // Build once with a path dependency, then rewrite the manifest
    // WITHOUT the dependency while src/main.rs still says
    // `extern crate bar` — the rebuild must now fail (status 101).
    let p = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "foo"
            version = "0.0.1"
            authors = []

            [dependencies.bar]
            path = "bar"
        "#)
        .file("src/main.rs", r#"
            extern crate bar;
            fn main() {}
        "#)
        .file("bar/Cargo.toml", r#"
            [package]
            name = "bar"
            version = "0.0.1"
            authors = []
        "#)
        .file("bar/src/lib.rs", "");

    assert_that(p.cargo_process("build"), execs().with_status(0));
    // Overwrite the manifest, dropping the [dependencies.bar] section.
    let p = p.file("Cargo.toml", r#"
            [package]
            name = "foo"
            version = "0.0.1"
            authors = []
        "#);
    assert_that(p.cargo_process("build"), execs().with_status(101));
})
|
2014-07-18 15:29:40 +00:00
|
|
|
|
|
|
|
test!(bad_cargo_toml_in_target_dir {
    // A malformed Cargo.toml lying inside target/ must be ignored —
    // cargo should not treat build output as a nested package.
    let p = project("world")
        .file("Cargo.toml", r#"
            [project]
            name = "foo"
            version = "0.0.1"
            authors = []
        "#)
        .file("src/main.rs", r#"
            fn main() {}
        "#)
        .file("target/Cargo.toml", "bad-toml");

    assert_that(p.cargo_process("build"), execs().with_status(0));
    assert_that(process(p.bin("foo")).unwrap(), execs().with_status(0));
})
|
2014-07-23 00:53:38 +00:00
|
|
|
|
|
|
|
test!(lib_with_standard_name {
    // A package named like a compiler-distributed crate ("syntax")
    // must still build and link normally, without colliding with the
    // in-tree crate of the same name.
    let p = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "syntax"
            version = "0.0.1"
            authors = []
        "#)
        .file("src/lib.rs", "
            pub fn foo() {}
        ")
        .file("src/main.rs", "
            extern crate syntax;
            fn main() { syntax::foo() }
        ");

    assert_that(p.cargo_process("build"),
                execs().with_status(0)
                       .with_stdout(format!("\
{compiling} syntax v0.0.1 ({dir})
",
                       compiling = COMPILING,
                       dir = p.url()).as_slice()));
})
|
2014-08-04 14:00:17 +00:00
|
|
|
|
|
|
|
test!(simple_staticlib {
    // A package whose only library target is a staticlib should build
    // cleanly with a plain `cargo build`.
    let manifest = r#"
            [package]
            name = "foo"
            authors = []
            version = "0.0.1"

            [lib]
            name = "foo"
            crate-type = ["staticlib"]
        "#;
    let p = project("foo")
        .file("Cargo.toml", manifest)
        .file("src/lib.rs", "pub fn foo() {}");

    assert_that(p.cargo_process("build"), execs().with_status(0));
})
|
2014-08-14 06:02:08 +00:00
|
|
|
|
2014-09-04 00:36:41 +00:00
|
|
|
test!(staticlib_rlib_and_bin {
    // A library built as both staticlib and rlib, plus a binary that
    // links against it — everything must build in one `cargo build`.
    let manifest = r#"
            [package]
            name = "foo"
            authors = []
            version = "0.0.1"

            [lib]
            name = "foo"
            crate-type = ["staticlib", "rlib"]
        "#;
    let main_src = r#"
            extern crate foo;

            fn main() {
                foo::foo();
            }"#;
    let p = project("foo")
        .file("Cargo.toml", manifest)
        .file("src/lib.rs", "pub fn foo() {}")
        .file("src/main.rs", main_src);

    assert_that(p.cargo_process("build"), execs().with_status(0));
})
|
|
|
|
|
2014-08-14 06:02:08 +00:00
|
|
|
test!(opt_out_of_lib {
    // A top-level `lib = []` opts the package out of its library
    // target entirely: src/lib.rs is never compiled, so the bad syntax
    // inside it must not break the build of the remaining bin target.
    let manifest = r#"
            lib = []

            [package]
            name = "foo"
            authors = []
            version = "0.0.1"
        "#;
    let p = project("foo")
        .file("Cargo.toml", manifest)
        .file("src/lib.rs", "bad syntax")
        .file("src/main.rs", "fn main() {}");

    assert_that(p.cargo_process("build"), execs().with_status(0));
})
|
|
|
|
|
|
|
|
test!(opt_out_of_bin {
    // Mirror of opt_out_of_lib: `bin = []` disables the binary target,
    // so the bad syntax in src/main.rs is never compiled and only the
    // library builds.
    let manifest = r#"
            bin = []

            [package]
            name = "foo"
            authors = []
            version = "0.0.1"
        "#;
    let p = project("foo")
        .file("Cargo.toml", manifest)
        .file("src/lib.rs", "")
        .file("src/main.rs", "bad syntax");

    assert_that(p.cargo_process("build"), execs().with_status(0));
})
|
|
|
|
|
|
|
|
test!(single_lib {
    // A [lib] section may point its `path` at a non-standard source
    // file (src/bar.rs); the build must honor it.
    let manifest = r#"
            [package]
            name = "foo"
            authors = []
            version = "0.0.1"

            [lib]
            name = "foo"
            path = "src/bar.rs"
        "#;
    let p = project("foo")
        .file("Cargo.toml", manifest)
        .file("src/bar.rs", "");

    assert_that(p.cargo_process("build"), execs().with_status(0));
})
|
2014-08-14 06:08:02 +00:00
|
|
|
|
|
|
|
test!(deprecated_lib {
    // The old array-of-tables [[lib]] form still works but must emit a
    // deprecation warning steering users to the singular [lib].
    let p = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "foo"
            authors = []
            version = "0.0.1"

            [[lib]]
            name = "foo"
        "#)
        .file("src/foo.rs", "");

    assert_that(p.cargo_process("build"),
                execs().with_status(0)
                       .with_stderr("\
the [[lib]] section has been deprecated in favor of [lib]\n"));
})
|
2014-08-16 00:57:16 +00:00
|
|
|
|
|
|
|
test!(freshness_ignores_excluded {
    // Files matching the manifest's `exclude` globs must not count
    // toward freshness: touching src/bar.rs (excluded by "src/b*.rs")
    // must not trigger a rebuild.
    let foo = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "foo"
            version = "0.0.0"
            authors = []
            build = "true"
            exclude = ["src/b*.rs"]
        "#)
        .file("src/lib.rs", "pub fn bar() -> int { 1 }");
    foo.build();
    // Backdate mtimes so subsequent file creations look "newer".
    foo.root().move_into_the_past().assert();

    assert_that(foo.process(cargo_dir().join("cargo")).arg("build"),
                execs().with_status(0)
                       .with_stdout(format!("\
{compiling} foo v0.0.0 ({url})
", compiling = COMPILING, url = foo.url())));

    // Smoke test to make sure it doesn't compile again
    println!("first pass");
    assert_that(foo.process(cargo_dir().join("cargo")).arg("build"),
                execs().with_status(0)
                       .with_stdout(""));

    // Modify an ignored file and make sure we don't rebuild
    println!("second pass");
    File::create(&foo.root().join("src/bar.rs")).assert();
    assert_that(foo.process(cargo_dir().join("cargo")).arg("build"),
                execs().with_status(0)
                       .with_stdout(""));
})
|
2014-08-16 01:15:13 +00:00
|
|
|
|
|
|
|
test!(rebuild_preserves_out_dir {
    // A build command's OUT_DIR must survive (and stay writable across)
    // a rebuild triggered by a source change.
    //
    // First: compile a helper "builder" binary that writes a file into
    // its OUT_DIR when executed as the build command.
    let mut build = project("builder");
    build = build
        .file("Cargo.toml", r#"
            [package]
            name = "builder"
            version = "0.5.0"
            authors = ["wycats@example.com"]
        "#)
        // NOTE(review): this is a plain raw string (not a format!
        // template), so `{{`/`}}` are literal doubled braces in the
        // generated main.rs; both if/else branches also do the same
        // File::create — looks like historical leftovers, confirm
        // intent before changing.
        .file("src/main.rs", r#"
            use std::os;
            use std::io::File;

            fn main() {{
                let path = Path::new(os::getenv("OUT_DIR").unwrap()).join("foo");
                if os::getenv("FIRST").is_some() {
                    File::create(&path).unwrap();
                } else {
                    File::create(&path).unwrap();
                }
            }}
        "#);
    assert_that(build.cargo_process("build"), execs().with_status(0));

    // Second: a package whose `build` command is that helper binary.
    let foo = project("foo")
        .file("Cargo.toml", format!(r#"
            [package]
            name = "foo"
            version = "0.0.0"
            authors = []
            build = '{}'
        "#, build.bin("builder").display()).as_slice())
        .file("src/lib.rs", "pub fn bar() -> int { 1 }");
    foo.build();
    foo.root().move_into_the_past().assert();

    assert_that(foo.process(cargo_dir().join("cargo")).arg("build")
                   .env("FIRST", Some("1")),
                execs().with_status(0)
                       .with_stdout(format!("\
{compiling} foo v0.0.0 ({url})
", compiling = COMPILING, url = foo.url())));

    // Touch a source file to force a rebuild; it must also succeed.
    File::create(&foo.root().join("src/bar.rs")).assert();
    assert_that(foo.process(cargo_dir().join("cargo")).arg("build"),
                execs().with_status(0)
                       .with_stdout(format!("\
{compiling} foo v0.0.0 ({url})
", compiling = COMPILING, url = foo.url())));
})
|
2014-09-07 18:48:35 +00:00
|
|
|
|
|
|
|
test!(dep_no_libs {
    // Depending on a package that only has a binary target (no library)
    // is an error: there is nothing to link against.
    let foo = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "foo"
            version = "0.0.0"
            authors = []

            [dependencies.bar]
            path = "bar"
        "#)
        .file("src/lib.rs", "pub fn bar() -> int { 1 }")
        .file("bar/Cargo.toml", r#"
            [package]
            name = "bar"
            version = "0.0.0"
            authors = []
        "#)
        .file("bar/src/main.rs", "");
    assert_that(foo.cargo_process("build"),
                execs().with_status(101)
                       .with_stderr("\
Package `bar v0.0.0 ([..])` has no library targets"));
})
|
2014-10-03 01:37:27 +00:00
|
|
|
|
|
|
|
test!(recompile_space_in_name {
    // A lib source path containing a space ("src/my lib.rs") must build,
    // and must still be considered fresh on the second build (empty
    // stdout means no recompile happened).
    let foo = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "foo"
            version = "0.0.0"
            authors = []

            [lib]
            name = "foo"
            path = "src/my lib.rs"
        "#)
        .file("src/my lib.rs", "");
    assert_that(foo.cargo_process("build"), execs().with_status(0));
    foo.root().move_into_the_past().assert();
    assert_that(foo.process(cargo_dir().join("cargo")).arg("build"),
                execs().with_status(0).with_stdout(""));
})
|
2014-10-17 21:02:16 +00:00
|
|
|
|
|
|
|
test!(ignore_bad_directories {
    // An unreadable/untraversable directory inside the project must not
    // break the build.
    let foo = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "foo"
            version = "0.0.0"
            authors = []
        "#)
        .file("src/lib.rs", "");
    foo.build();
    // NOTE(review): USER_EXEC ^ USER_EXEC == empty permission bits, i.e.
    // tmp/ is created with no permissions at all — presumably intentional
    // (an XOR self-cancel used to spell "no perms"); confirm this wasn't
    // meant to be e.g. USER_DIR ^ USER_EXEC.
    fs::mkdir(&foo.root().join("tmp"), io::USER_EXEC ^ io::USER_EXEC).unwrap();
    assert_that(foo.process(cargo_dir().join("cargo")).arg("build"),
                execs().with_status(0));
    // Restore sane permissions so the test harness can clean up tmp/.
    fs::chmod(&foo.root().join("tmp"), io::USER_DIR).unwrap();
})
|
2014-10-24 15:48:00 +00:00
|
|
|
|
|
|
|
test!(bad_cargo_config {
    // Invalid TOML in .cargo/config is a hard error with a chained
    // "Caused by:" explanation, including the parser's position info.
    let foo = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "foo"
            version = "0.0.0"
            authors = []
        "#)
        .file("src/lib.rs", "")
        .file(".cargo/config", r#"
            this is not valid toml
        "#);
    assert_that(foo.cargo_process("build").arg("-v"),
                execs().with_status(101).with_stderr("\
Couldn't load Cargo configuration

Caused by:
could not parse Toml manifest; path=[..]

Caused by:
could not parse input TOML
[..].cargo[..]config:2:20-2:21 expected `=`, but found `i`

"));
})
|
2014-10-21 16:36:55 +00:00
|
|
|
|
|
|
|
// Linux variant: the [target.*-linux-gnu] dependency tables match the
// host triple here, so `bar` IS pulled in and the build succeeds.
#[cfg(target_os = "linux")]
test!(cargo_platform_specific_dependency {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]

            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [target.i686-unknown-linux-gnu.dependencies.bar]
            path = "bar"
            [target.x86_64-unknown-linux-gnu.dependencies.bar]
            path = "bar"
        "#)
        .file("src/main.rs",
              main_file(r#""{}", bar::gimme()"#, &["bar"]).as_slice())
        .file("bar/Cargo.toml", r#"
            [project]

            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]
        "#)
        .file("bar/src/lib.rs", r#"
            pub fn gimme() -> String {
                "test passed".to_string()
            }
        "#);

    p.cargo_process("build")
        .exec_with_output()
        .assert();

    assert_that(&p.bin("foo"), existing_file());

    // The binary links bar and prints bar's string.
    assert_that(
      process(p.bin("foo")).unwrap(),
      execs().with_stdout("test passed\n"));
})
|
|
|
|
|
|
|
|
// Non-Linux variant: the linux-only target tables do NOT match, so
// `bar` is not activated. src/main.rs still references bar::gimme(),
// so the build must fail (status 101) — proving the dependency was
// genuinely skipped.
#[cfg(not(target_os = "linux"))]
test!(cargo_platform_specific_dependency {
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]

            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [target.i686-unknown-linux-gnu.dependencies.bar]
            path = "bar"
            [target.x86_64-unknown-linux-gnu.dependencies.bar]
            path = "bar"
        "#)
        .file("src/main.rs",
              main_file(r#""{}", bar::gimme()"#, &["bar"]).as_slice())
        .file("bar/Cargo.toml", r#"
            [project]

            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]
        "#)
        // bar itself would not even compile (extern crate baz does not
        // exist) — irrelevant here, since bar must never be built.
        .file("bar/src/lib.rs", r#"
            extern crate baz;

            pub fn gimme() -> String {
                format!("")
            }
        "#);

    assert_that(p.cargo_process("build"),
                execs().with_status(101));
})
|
|
|
|
|
|
|
|
test!(cargo_platform_specific_dependency_wrong_platform {
    // A dependency keyed to a nonexistent triple is never compiled
    // (bar/src/lib.rs is intentionally invalid Rust), yet it still
    // appears in the generated lockfile.
    let p = project("foo")
        .file("Cargo.toml", r#"
            [project]

            name = "foo"
            version = "0.5.0"
            authors = ["wycats@example.com"]

            [target.non-existing-triplet.dependencies.bar]
            path = "bar"
        "#)
        .file("src/main.rs", r#"
            fn main() {}
        "#)
        .file("bar/Cargo.toml", r#"
            [project]

            name = "bar"
            version = "0.5.0"
            authors = ["wycats@example.com"]
        "#)
        .file("bar/src/lib.rs", r#"
            invalid rust file, should not be compiled
        "#);

    p.cargo_process("build")
        .exec_with_output()
        .assert();

    assert_that(&p.bin("foo"), existing_file());

    assert_that(
      process(p.bin("foo")).unwrap(),
      execs());

    // The inactive dependency is still recorded in Cargo.lock.
    let lockfile = p.root().join("Cargo.lock");
    let lockfile = File::open(&lockfile).read_to_string().assert();
    assert!(lockfile.as_slice().contains("bar"))
})
|
2014-11-20 04:36:26 +00:00
|
|
|
|
|
|
|
test!(example_bin_same_name {
    // A binary and an example may share the name "foo": the example is
    // built into target/examples/foo alongside the bin target/foo, and
    // a second `cargo test --no-run` leaves both artifacts in place.
    let p = project("foo")
        .file("Cargo.toml", r#"
            [package]
            name = "foo"
            version = "0.0.1"
            authors = []
        "#)
        .file("src/main.rs", "fn main() {}")
        .file("examples/foo.rs", "fn main() {}");

    p.cargo_process("test").arg("--no-run")
        .exec_with_output()
        .assert();

    assert_that(&p.bin("foo"), existing_file());
    assert_that(&p.bin("examples/foo"), existing_file());

    // Run again without reinitializing the project; artifacts persist.
    p.process(cargo_dir().join("cargo")).arg("test").arg("--no-run")
        .exec_with_output()
        .assert();

    assert_that(&p.bin("foo"), existing_file());
    assert_that(&p.bin("examples/foo"), existing_file());
})
|