//! Test-support registry infrastructure: a local, file- or HTTP-served
//! registry that stands in for crates.io in Cargo's test suite. See
//! [`RegistryBuilder`] for setup and `Package` for publishing test crates.
use crate::git::repo;
use crate::paths;
use crate::publish::{create_index_line, write_to_index};
use cargo_util::paths::append;
use cargo_util::Sha256;
use flate2::write::GzEncoder;
use flate2::Compression;
use std::collections::{BTreeMap, HashMap};
use std::fmt;
use std::fs::{self, File};
use std::io::{BufRead, BufReader, Read, Write};
use std::net::{SocketAddr, TcpListener, TcpStream};
use std::path::PathBuf;
use std::thread;
use tar::{Builder, Header};
use url::Url;
2019-02-03 04:01:23 +00:00
/// Gets the path to the local index pretending to be crates.io. This is a Git repo
/// initialized with a `config.json` file pointing to `dl_path` for downloads
/// and `api_path` for uploads.
2018-03-14 15:17:44 +00:00
pub fn registry_path() -> PathBuf {
generate_path("registry")
2018-03-14 15:17:44 +00:00
}
2019-02-03 04:01:23 +00:00
/// Gets the path for local web API uploads. Cargo will place the contents of a web API
/// request here. For example, `api/v1/crates/new` is the result of publishing a crate.
2018-07-17 02:20:39 +00:00
pub fn api_path() -> PathBuf {
generate_path("api")
2018-07-17 02:20:39 +00:00
}
2019-02-03 04:01:23 +00:00
/// Gets the path where crates can be downloaded using the web API endpoint. Crates
/// should be organized as `{name}/{version}/download` to match the web API
/// endpoint. This is rarely used and must be manually set up.
fn dl_path() -> PathBuf {
generate_path("dl")
2018-03-14 15:17:44 +00:00
}
2019-02-03 04:01:23 +00:00
/// Gets the alternative-registry version of `registry_path`.
fn alt_registry_path() -> PathBuf {
generate_path("alternative-registry")
2018-03-14 15:17:44 +00:00
}
/// Gets the alternative-registry version of `registry_url`.
fn alt_registry_url() -> Url {
generate_url("alternative-registry")
2018-03-14 15:17:44 +00:00
}
2019-02-03 04:01:23 +00:00
/// Gets the alternative-registry version of `dl_path`.
2018-03-14 15:17:44 +00:00
pub fn alt_dl_path() -> PathBuf {
generate_path("alternative-dl")
}
2019-02-03 04:01:23 +00:00
/// Gets the alternative-registry version of `api_path`.
2018-03-14 15:17:44 +00:00
pub fn alt_api_path() -> PathBuf {
generate_path("alternative-api")
2018-03-14 15:17:44 +00:00
}
fn generate_path(name: &str) -> PathBuf {
2019-12-14 03:42:00 +00:00
paths::root().join(name)
}
fn generate_url(name: &str) -> Url {
2019-12-14 03:42:00 +00:00
Url::from_file_path(generate_path(name)).ok().unwrap()
}
/// A builder for initializing registries.
///
/// Defaults are set by [`RegistryBuilder::new`]; call `build` to create
/// the on-disk registry and write the test configuration.
pub struct RegistryBuilder {
    /// If set, configures an alternate registry with the given name.
    alternative: Option<String>,
    /// If set, the authorization token for the registry.
    token: Option<String>,
    /// If set, serves the index over http.
    http_index: bool,
    /// If set, serves the API over http.
    http_api: bool,
    /// If set, config.json includes 'api'
    api: bool,
    /// Write the token in the configuration (`.cargo/credentials`).
    configure_token: bool,
    /// Write the registry in configuration (`.cargo/config`).
    configure_registry: bool,
    /// API responders keyed by URL, overriding the HTTP server's
    /// default response for that URL.
    custom_responders: HashMap<&'static str, Box<dyn Send + Fn(&Request) -> Response>>,
}
/// A registry prepared by [`RegistryBuilder::build`].
pub struct TestRegistry {
    /// Handle to the HTTP server, if one was started. Kept alive for the
    /// registry's lifetime; its `Drop` impl asks the server to stop.
    _server: Option<HttpServerHandle>,
    /// URL of the registry index (file URL or sparse HTTP URL).
    index_url: Url,
    /// Filesystem path of the registry index.
    path: PathBuf,
    /// URL of the web API root.
    api_url: Url,
    /// URL from which crates are downloaded.
    dl_url: Url,
    /// Authorization token, if one was configured.
    token: Option<String>,
}
impl TestRegistry {
    /// Returns the URL of the registry index.
    pub fn index_url(&self) -> &Url {
        &self.index_url
    }

    /// Returns the URL of the web API root.
    pub fn api_url(&self) -> &Url {
        &self.api_url
    }

    /// Returns the authorization token.
    ///
    /// Panics if the registry was built without a token (see
    /// `RegistryBuilder::no_configure_token` / `token`).
    pub fn token(&self) -> &str {
        self.token
            .as_deref()
            .expect("registry was not configured with a token")
    }
}
impl RegistryBuilder {
    /// Creates a builder with the defaults: replaces crates.io, token
    /// `"api-token"`, API enabled, file-based index/API, and both the
    /// registry and token written into the test configuration.
    #[must_use]
    pub fn new() -> RegistryBuilder {
        RegistryBuilder {
            alternative: None,
            token: Some("api-token".to_string()),
            http_api: false,
            http_index: false,
            api: true,
            configure_registry: true,
            configure_token: true,
            custom_responders: HashMap::new(),
        }
    }

    /// Adds a custom HTTP response for a specific url
    #[must_use]
    pub fn add_responder<R: 'static + Send + Fn(&Request) -> Response>(
        mut self,
        url: &'static str,
        responder: R,
    ) -> Self {
        self.custom_responders.insert(url, Box::new(responder));
        self
    }

    /// Sets whether or not to initialize as an alternative registry,
    /// using the given registry name.
    #[must_use]
    pub fn alternative_named(mut self, alt: &str) -> Self {
        self.alternative = Some(alt.to_string());
        self
    }

    /// Sets whether or not to initialize as an alternative registry,
    /// using the default name "alternative".
    #[must_use]
    pub fn alternative(self) -> Self {
        self.alternative_named("alternative")
    }

    /// Prevents placing a token in the configuration
    #[must_use]
    pub fn no_configure_token(mut self) -> Self {
        self.configure_token = false;
        self
    }

    /// Prevents adding the registry to the configuration.
    #[must_use]
    pub fn no_configure_registry(mut self) -> Self {
        self.configure_registry = false;
        self
    }

    /// Sets the token value
    #[must_use]
    pub fn token(mut self, token: &str) -> Self {
        self.token = Some(token.to_string());
        self
    }

    /// Operate the index over http
    #[must_use]
    pub fn http_index(mut self) -> Self {
        self.http_index = true;
        self
    }

    /// Operate the api over http
    #[must_use]
    pub fn http_api(mut self) -> Self {
        self.http_api = true;
        self
    }

    /// The registry has no api.
    #[must_use]
    pub fn no_api(mut self) -> Self {
        self.api = false;
        self
    }

    /// Initializes the registry: creates the on-disk directories and git
    /// index, optionally starts the HTTP server, and writes the cargo
    /// configuration/credentials files as requested.
    #[must_use]
    pub fn build(self) -> TestRegistry {
        // All configuration lives under the per-test home directory.
        let config_path = paths::home().join(".cargo/config");
        t!(fs::create_dir_all(config_path.parent().unwrap()));
        // Alternative registries get a "<name>-" prefix on every
        // directory and URL so they don't collide with the primary one.
        let prefix = if let Some(alternative) = &self.alternative {
            format!("{alternative}-")
        } else {
            String::new()
        };
        let registry_path = generate_path(&format!("{prefix}registry"));
        let index_url = generate_url(&format!("{prefix}registry"));
        let api_url = generate_url(&format!("{prefix}api"));
        let dl_url = generate_url(&format!("{prefix}dl"));
        let dl_path = generate_path(&format!("{prefix}dl"));
        let api_path = generate_path(&format!("{prefix}api"));

        // Only start the HTTP server when something is actually served
        // over http; otherwise keep the file-based URLs.
        let (server, index_url, api_url, dl_url) = if !self.http_index && !self.http_api {
            // No need to start the HTTP server.
            (None, index_url, api_url, dl_url)
        } else {
            let server = HttpServer::new(
                registry_path.clone(),
                dl_path,
                self.token.clone(),
                self.custom_responders,
            );
            let index_url = if self.http_index {
                server.index_url()
            } else {
                index_url
            };
            let api_url = if self.http_api {
                server.api_url()
            } else {
                api_url
            };
            // Downloads always go through the server when it is running.
            let dl_url = server.dl_url();
            (Some(server), index_url, api_url, dl_url)
        };

        let registry = TestRegistry {
            api_url,
            index_url,
            _server: server,
            dl_url,
            path: registry_path,
            token: self.token,
        };

        if self.configure_registry {
            // Point cargo at this registry: either as a named
            // alternative registry, or as a source replacement for
            // crates.io.
            if let Some(alternative) = &self.alternative {
                append(
                    &config_path,
                    format!(
                        "
[registries.{alternative}]
index = '{}'",
                        registry.index_url
                    )
                    .as_bytes(),
                )
                .unwrap();
            } else {
                append(
                    &config_path,
                    format!(
                        "
[source.crates-io]
replace-with = 'dummy-registry'
[source.dummy-registry]
registry = '{}'",
                        registry.index_url
                    )
                    .as_bytes(),
                )
                .unwrap();
            }
        }

        if self.configure_token {
            let token = registry.token.as_deref().unwrap();
            let credentials = paths::home().join(".cargo/credentials");
            if let Some(alternative) = &self.alternative {
                append(
                    &credentials,
                    format!(
                        r#"
[registries.{alternative}]
token = "{token}"
"#
                    )
                    .as_bytes(),
                )
                .unwrap();
            } else {
                append(
                    &credentials,
                    format!(
                        r#"
[registry]
token = "{token}"
"#
                    )
                    .as_bytes(),
                )
                .unwrap();
            }
        }

        // Only advertise the API endpoint in config.json when the
        // registry has one.
        let api = if self.api {
            format!(r#","api":"{}""#, registry.api_url)
        } else {
            String::new()
        };
        // Initialize a new registry.
        repo(&registry.path)
            .file(
                "config.json",
                &format!(r#"{{"dl":"{}"{api}}}"#, registry.dl_url),
            )
            .build();
        fs::create_dir_all(api_path.join("api/v1/crates")).unwrap();

        registry
    }
}
/// A builder for creating a new package in a registry.
///
/// This uses "source replacement" using an automatically generated
/// `.cargo/config` file to ensure that dependencies will use these packages
/// instead of contacting crates.io. See `source-replacement.md` for more
/// details on how source replacement works.
///
/// Call `publish` to finalize and create the package.
///
/// If no files are specified, an empty `lib.rs` file is automatically created.
///
/// The `Cargo.toml` file is automatically generated based on the methods
/// called on `Package` (for example, calling `dep()` will add to the
/// `[dependencies]` automatically). You may also specify a `Cargo.toml` file
/// to override the generated one.
///
/// This supports different registry types:
/// - Regular source replacement that replaces `crates.io` (the default).
/// - A "local registry" which is a subset for vendoring (see
/// `Package::local`).
/// - An "alternative registry" which requires specifying the registry name
/// (see `Package::alternative`).
///
/// This does not support "directory sources". See `directory.rs` for
/// `VendorPackage` which implements directory sources.
///
/// # Example
/// ```
/// // Publish package "a" depending on "b".
/// Package::new("a", "1.0.0")
/// .dep("b", "1.0.0")
/// .file("src/lib.rs", r#"
/// extern crate b;
/// pub fn f() -> i32 { b::f() * 2 }
/// "#)
/// .publish();
///
/// // Publish package "b".
/// Package::new("b", "1.0.0")
/// .file("src/lib.rs", r#"
/// pub fn f() -> i32 { 12 }
/// "#)
/// .publish();
///
/// // Create a project that uses package "a".
/// let p = project()
/// .file("Cargo.toml", r#"
/// [package]
/// name = "foo"
/// version = "0.0.1"
///
/// [dependencies]
/// a = "1.0"
/// "#)
/// .file("src/main.rs", r#"
/// extern crate a;
/// fn main() { println!("{}", a::f()); }
/// "#)
/// .build();
///
/// p.cargo("run").with_stdout("24").run();
/// ```
#[must_use]
pub struct Package {
    // Crate name.
    name: String,
    // Crate version.
    vers: String,
    // Dependencies recorded in the index and generated `Cargo.toml`.
    deps: Vec<Dependency>,
    // Files to include in the package tarball.
    files: Vec<PackageFile>,
    // Whether this version is marked yanked in the index.
    yanked: bool,
    // Feature map written to the index.
    features: FeatureMap,
    // If `true`, publish as a "local registry" package (vendoring
    // subset) — see `Package::local` per the type-level docs.
    local: bool,
    // If `true`, publish to the alternative registry instead of the
    // crates.io replacement.
    alternative: bool,
    // Presumably emits deliberately invalid JSON into the index for
    // error-handling tests — TODO confirm against callers.
    invalid_json: bool,
    // Whether the crate is a proc-macro crate.
    proc_macro: bool,
    // Value of the `links` manifest key, if any.
    links: Option<String>,
    // Value of the `rust-version` manifest key, if any.
    rust_version: Option<String>,
    // `cargo-features` entries for the generated manifest.
    cargo_features: Vec<String>,
    // Index schema version ("v" field) — presumably selects the index
    // entry format; verify against the publish code.
    v: Option<u32>,
}
/// Map of feature name to the list of features/deps it enables.
pub(crate) type FeatureMap = BTreeMap<String, Vec<String>>;

/// A dependency of a [`Package`], as recorded in the index and in the
/// generated `Cargo.toml`.
#[derive(Clone)]
pub struct Dependency {
    /// Dependency name as written on the left-hand side of the
    /// dependency entry in `Cargo.toml` (may be a rename of `package`).
    name: String,
    /// Version requirement.
    vers: String,
    /// Dependency kind — presumably "normal"/"dev"/"build"; confirm
    /// against the builder methods elsewhere in this file.
    kind: String,
    /// Artifact dependency: `(artifact kind, optional lib flag/target)` —
    /// NOTE(review): exact tuple semantics not visible here; verify
    /// against the `artifact` setter.
    artifact: Option<(String, Option<String>)>,
    /// Platform/`target` restriction for this dependency, if any.
    target: Option<String>,
    /// Features enabled for the dependency.
    features: Vec<String>,
    /// Registry the dependency comes from, if not the default.
    registry: Option<String>,
    /// Real package name when `name` is a rename (the `package` key).
    package: Option<String>,
    /// Whether the dependency is optional.
    optional: bool,
}
2022-08-26 00:12:25 +00:00
/// Entry with data that corresponds to [`tar::EntryType`].
#[non_exhaustive]
enum EntryData {
    /// A regular file with the given contents.
    Regular(String),
    /// A symbolic link pointing at the given target path.
    Symlink(PathBuf),
}
/// A file to be created in a package.
struct PackageFile {
    /// Path of the entry inside the tarball (normally placed under the
    /// `$PACKAGE-$VERSION` directory unless `extra` is set).
    path: String,
    /// File contents, or the symlink target for symlink entries.
    contents: EntryData,
    /// The Unix mode for the file. Note that when extracted on Windows, this
    /// is mostly ignored since it doesn't have the same style of permissions.
    mode: u32,
    /// If `true`, the file is created in the root of the tarfile, used for
    /// testing invalid packages.
    extra: bool,
}
/// Default Unix file mode (`rw-r--r--`) for files added to a package.
const DEFAULT_MODE: u32 = 0o644;
/// Initializes the on-disk registry and sets up the config so that crates.io
/// is replaced with the one on disk.
pub fn init() -> TestRegistry {
    RegistryBuilder::new().build()
}
/// Variant of `init` that initializes the "alternative" registry and crates.io
/// replacement.
pub fn alt_init() -> TestRegistry {
    // The crates.io replacement must exist before the alternative registry
    // is created on top of it.
    init();
    RegistryBuilder::new().alternative().build()
}
/// Handle to a running test HTTP server; dropping it shuts the server down
/// (see the `Drop` impl, which sends a "stop" sentinel).
pub struct HttpServerHandle {
    /// Address the background server thread is listening on.
    addr: SocketAddr,
}
impl HttpServerHandle {
    /// Sparse-protocol index URL (`sparse+http://…/index/`) for this server.
    pub fn index_url(&self) -> Url {
        // `SocketAddr` implements `Display`, so formatting it directly avoids
        // the intermediate `String` that `.to_string()` would allocate.
        Url::parse(&format!("sparse+http://{}/index/", self.addr)).unwrap()
    }

    /// Base URL of the web API endpoint.
    pub fn api_url(&self) -> Url {
        Url::parse(&format!("http://{}/", self.addr)).unwrap()
    }

    /// URL of the crate download endpoint.
    pub fn dl_url(&self) -> Url {
        Url::parse(&format!("http://{}/dl", self.addr)).unwrap()
    }
}
impl Drop for HttpServerHandle {
    fn drop(&mut self) {
        // Wake the blocking accept loop by connecting and sending the "stop"
        // sentinel that `HttpServer::start` recognizes. Errors are deliberately
        // ignored: the server thread may already be gone.
        if let Ok(mut stream) = TcpStream::connect(self.addr) {
            // shutdown the server
            let _ = stream.write_all(b"stop");
            let _ = stream.flush();
        }
    }
}
/// Request to the test http server
pub struct Request {
    /// Full URL that was requested.
    pub url: Url,
    /// HTTP method, lowercased by the parser (e.g. "get", "put").
    pub method: String,
    /// Raw request body, present only when a `Content-Length` header was sent.
    pub body: Option<Vec<u8>>,
    /// Value of the `Authorization` header, if any.
    pub authorization: Option<String>,
    /// Value of the `If-Modified-Since` header, if any.
    pub if_modified_since: Option<String>,
    /// Value of the `If-None-Match` header, if any.
    pub if_none_match: Option<String>,
}
2022-03-09 22:10:22 +00:00
impl fmt::Debug for Request {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The body is deliberately left out of the debug output, since it can
        // be large and makes test logs hard to read.
        let mut dbg = f.debug_struct("Request");
        dbg.field("url", &self.url);
        dbg.field("method", &self.method);
        dbg.field("authorization", &self.authorization);
        dbg.field("if_modified_since", &self.if_modified_since);
        dbg.field("if_none_match", &self.if_none_match);
        dbg.finish()
    }
}
/// Response from the test http server
pub struct Response {
    /// HTTP status code (e.g. 200, 404).
    pub code: u32,
    /// Extra header lines without trailing `\r\n`; `Content-Length` is
    /// written automatically by the server loop.
    pub headers: Vec<String>,
    /// Raw response body bytes.
    pub body: Vec<u8>,
}
/// Minimal in-process HTTP server backing the registry/web-API tests.
struct HttpServer {
    /// Listener the accept loop runs on.
    listener: TcpListener,
    /// Root of the on-disk registry index, served under `/index/`.
    registry_path: PathBuf,
    /// Directory of `.crate` downloads, served under `/dl/`.
    dl_path: PathBuf,
    /// Expected `Authorization` value for mutating endpoints, if any.
    token: Option<String>,
    /// Per-path overrides letting tests stub out arbitrary responses.
    custom_responders: HashMap<&'static str, Box<dyn Send + Fn(&Request) -> Response>>,
}
impl HttpServer {
    /// Spawns the server on a background thread, bound to a fresh ephemeral
    /// port, and returns a handle for reaching (and eventually stopping) it.
    pub fn new(
        registry_path: PathBuf,
        dl_path: PathBuf,
        token: Option<String>,
        api_responders: HashMap<&'static str, Box<dyn Send + Fn(&Request) -> Response>>,
    ) -> HttpServerHandle {
        // Port 0 asks the OS to pick any free port.
        let listener = TcpListener::bind("127.0.0.1:0").unwrap();
        let addr = listener.local_addr().unwrap();
        let server = HttpServer {
            listener,
            registry_path,
            dl_path,
            token,
            custom_responders: api_responders,
        };
        // The thread runs until `start` sees the "stop" sentinel sent by
        // `HttpServerHandle::drop`.
        thread::spawn(move || server.start());
        HttpServerHandle { addr }
    }
    /// Accept loop: parses a minimal subset of HTTP/1.1 per connection,
    /// dispatches to [`HttpServer::route`], and writes the response back.
    /// Returns when a bare "stop" line is received instead of a request line.
    fn start(&self) {
        // Reused line buffer to avoid allocating per header line.
        let mut line = String::new();
        'server: loop {
            let (socket, _) = self.listener.accept().unwrap();
            let mut buf = BufReader::new(socket);
            line.clear();
            if buf.read_line(&mut line).unwrap() == 0 {
                // Connection terminated.
                continue;
            }
            // Read the "GET path HTTP/1.1" line.
            let mut parts = line.split_ascii_whitespace();
            let method = parts.next().unwrap().to_ascii_lowercase();
            if method == "stop" {
                // Shutdown the server.
                return;
            }
            // Reconstruct an absolute URL from the request path so handlers
            // can use `Url` accessors.
            let addr = self.listener.local_addr().unwrap();
            let url = format!(
                "http://{}/{}",
                addr,
                parts.next().unwrap().trim_start_matches('/')
            );
            let url = Url::parse(&url).unwrap();
            // Grab headers we care about.
            let mut if_modified_since = None;
            let mut if_none_match = None;
            let mut authorization = None;
            let mut content_len = None;
            loop {
                line.clear();
                if buf.read_line(&mut line).unwrap() == 0 {
                    // EOF mid-headers: abandon this connection.
                    continue 'server;
                }
                if line == "\r\n" {
                    // End of headers.
                    line.clear();
                    break;
                }
                let (name, value) = line.split_once(':').unwrap();
                let name = name.trim().to_ascii_lowercase();
                let value = value.trim().to_string();
                match name.as_str() {
                    "if-modified-since" => if_modified_since = Some(value),
                    "if-none-match" => if_none_match = Some(value),
                    "authorization" => authorization = Some(value),
                    "content-length" => content_len = Some(value),
                    _ => {}
                }
            }
            // Only read a body when Content-Length was provided.
            let mut body = None;
            if let Some(con_len) = content_len {
                let len = con_len.parse::<u64>().unwrap();
                let mut content = vec![0u8; len as usize];
                buf.read_exact(&mut content).unwrap();
                body = Some(content)
            }
            let req = Request {
                authorization,
                if_modified_since,
                if_none_match,
                method,
                url,
                body,
            };
            // Log the request to aid debugging failing tests.
            println!("req: {:#?}", req);
            let response = self.route(&req);
            // Write the status line, headers, and body back on the same socket.
            let buf = buf.get_mut();
            write!(buf, "HTTP/1.1 {}\r\n", response.code).unwrap();
            write!(buf, "Content-Length: {}\r\n", response.body.len()).unwrap();
            for header in response.headers {
                write!(buf, "{}\r\n", header).unwrap();
            }
            write!(buf, "\r\n").unwrap();
            buf.write_all(&response.body).unwrap();
            buf.flush().unwrap();
        }
    }
2022-03-09 22:10:22 +00:00
/// Route the request
fn route(&self, req: &Request) -> Response {
let authorized = |mutatation: bool| {
if mutatation {
self.token == req.authorization
} else {
assert!(req.authorization.is_none(), "unexpected token");
true
}
};
2022-03-09 22:10:22 +00:00
// Check for custom responder
if let Some(responder) = self.custom_responders.get(req.url.path()) {
return responder(&req);
}
let path: Vec<_> = req.url.path()[1..].split('/').collect();
match (req.method.as_str(), path.as_slice()) {
("get", ["index", ..]) => {
if !authorized(false) {
self.unauthorized(req)
} else {
self.index(&req)
2022-03-09 22:10:22 +00:00
}
}
("get", ["dl", ..]) => {
if !authorized(false) {
self.unauthorized(req)
} else {
self.dl(&req)
2022-03-09 22:10:22 +00:00
}
}
// publish
("put", ["api", "v1", "crates", "new"]) => {
if !authorized(true) {
self.unauthorized(req)
} else {
self.publish(req)
}
}
// The remainder of the operators in the test framework do nothing other than responding 'ok'.
//
// Note: We don't need to support anything real here because there are no tests that
// currently require anything other than publishing via the http api.
// yank
("delete", ["api", "v1", "crates", .., "yank"])
// unyank
| ("put", ["api", "v1", "crates", .., "unyank"])
// owners
| ("get" | "put" | "delete", ["api", "v1", "crates", .., "owners"]) => {
if !authorized(true) {
self.unauthorized(req)
} else {
self.ok(&req)
2022-03-09 22:10:22 +00:00
}
}
_ => self.not_found(&req),
}
}
/// Unauthorized response
fn unauthorized(&self, _req: &Request) -> Response {
Response {
code: 401,
headers: vec![],
body: b"Unauthorized message from server.".to_vec(),
}
}
/// Not found response
fn not_found(&self, _req: &Request) -> Response {
Response {
code: 404,
headers: vec![],
body: b"not found".to_vec(),
}
}
/// Respond OK without doing anything
fn ok(&self, _req: &Request) -> Response {
Response {
code: 200,
headers: vec![],
body: br#"{"ok": true, "msg": "completed!"}"#.to_vec(),
}
}
/// Serve the download endpoint
fn dl(&self, req: &Request) -> Response {
let file = self
.dl_path
.join(req.url.path().strip_prefix("/dl/").unwrap());
println!("{}", file.display());
if !file.exists() {
return self.not_found(req);
}
return Response {
body: fs::read(&file).unwrap(),
code: 200,
headers: vec![],
};
}
2022-03-09 22:10:22 +00:00
    /// Serve the registry index, honoring conditional requests: a `304 Not
    /// Modified` is returned only when every validator the client sent
    /// (`If-None-Match`, `If-Modified-Since`) matches.
    fn index(&self, req: &Request) -> Response {
        let file = self
            .registry_path
            .join(req.url.path().strip_prefix("/index/").unwrap());
        if !file.exists() {
            return self.not_found(req);
        } else {
            // Now grab info about the file.
            let data = fs::read(&file).unwrap();
            let etag = Sha256::new().update(&data).finish_hex();
            let last_modified = format!("{:?}", file.metadata().unwrap().modified().unwrap());
            // Start to construct our response:
            // `any_match` — at least one validator was sent and matched;
            // `all_match` — no validator that was sent failed to match.
            let mut any_match = false;
            let mut all_match = true;
            if let Some(expected) = &req.if_none_match {
                if &etag != expected {
                    all_match = false;
                } else {
                    any_match = true;
                }
            }
            if let Some(expected) = &req.if_modified_since {
                // NOTE: Equality comparison is good enough for tests.
                if &last_modified != expected {
                    all_match = false;
                } else {
                    any_match = true;
                }
            }
            if any_match && all_match {
                // Client's cached copy is current: empty 304 response.
                return Response {
                    body: Vec::new(),
                    code: 304,
                    headers: vec![],
                };
            } else {
                // Full response, advertising validators for future requests.
                return Response {
                    body: data,
                    code: 200,
                    headers: vec![
                        format!("ETag: \"{}\"", etag),
                        format!("Last-Modified: {}", last_modified),
                    ],
                };
            }
        }
    }
    /// Serve the publish endpoint: stores the uploaded `.crate` under
    /// `dl_path` and appends a matching entry to the on-disk registry index.
    ///
    /// The request body is the crates.io publish framing: a 4-byte
    /// little-endian JSON length, the JSON metadata, a 4-byte little-endian
    /// crate length, then the raw `.crate` bytes.
    fn publish(&self, req: &Request) -> Response {
        if let Some(body) = &req.body {
            // Get the metadata of the package
            let (len, remaining) = body.split_at(4);
            let json_len = u32::from_le_bytes(len.try_into().unwrap());
            let (json, remaining) = remaining.split_at(json_len as usize);
            let new_crate = serde_json::from_slice::<crates_io::NewCrate>(json).unwrap();
            // Get the `.crate` file
            let (len, remaining) = remaining.split_at(4);
            let file_len = u32::from_le_bytes(len.try_into().unwrap());
            let (file, _remaining) = remaining.split_at(file_len as usize);
            // Write the `.crate`
            let dst = self
                .dl_path
                .join(&new_crate.name)
                .join(&new_crate.vers)
                .join("download");
            t!(fs::create_dir_all(dst.parent().unwrap()));
            t!(fs::write(&dst, file));
            // Build the index entries for each dependency. A renamed
            // dependency puts the rename under `name` and the real crate
            // under `package`, mirroring the crates.io index format.
            let deps = new_crate
                .deps
                .iter()
                .map(|dep| {
                    let (name, package) = match &dep.explicit_name_in_toml {
                        Some(explicit) => (explicit.to_string(), Some(dep.name.to_string())),
                        None => (dep.name.to_string(), None),
                    };
                    serde_json::json!({
                        "name": name,
                        "req": dep.version_req,
                        "features": dep.features,
                        "default_features": true,
                        "target": dep.target,
                        "optional": dep.optional,
                        "kind": dep.kind,
                        "registry": dep.registry,
                        "package": package,
                    })
                })
                .collect::<Vec<_>>();
            let line = create_index_line(
                serde_json::json!(new_crate.name),
                &new_crate.vers,
                deps,
                &cksum(file),
                new_crate.features,
                false,
                new_crate.links,
                None,
            );
            write_to_index(&self.registry_path, &new_crate.name, line, false);
            self.ok(&req)
        } else {
            // Publish requests must always carry a framed body.
            Response {
                code: 400,
                headers: vec![],
                body: b"The request was missing a body".to_vec(),
            }
        }
    }
2022-03-09 22:10:22 +00:00
}
impl Package {
2019-02-03 04:01:23 +00:00
    /// Creates a new package builder.
    /// Call `publish()` to finalize and build the package.
    pub fn new(name: &str, vers: &str) -> Package {
        // Lazily initialize the on-disk test registry the first time any
        // package is created in this test's home directory.
        let config = paths::home().join(".cargo/config");
        if !config.exists() {
            init();
        }
        Package {
            name: name.to_string(),
            vers: vers.to_string(),
            deps: Vec::new(),
            files: Vec::new(),
            yanked: false,
            features: BTreeMap::new(),
            local: false,
            alternative: false,
            invalid_json: false,
            proc_macro: false,
            links: None,
            rust_version: None,
            cargo_features: Vec::new(),
            v: None,
        }
    }
    /// Call with `true` to publish in a "local registry".
    ///
    /// See `source-replacement.html#local-registry-sources` for more details
    /// on local registries. See `local_registry.rs` for the tests that use
    /// this.
    ///
    /// Returns `&mut self` for builder-style chaining.
    pub fn local(&mut self, local: bool) -> &mut Package {
        self.local = local;
        self
    }
    /// Call with `true` to publish in an "alternative registry".
    ///
    /// The name of the alternative registry is called "alternative".
    ///
    /// See `src/doc/src/reference/registries.md` for more details on
    /// alternative registries. See `alt_registry.rs` for the tests that use
    /// this.
    ///
    /// Returns `&mut self` for builder-style chaining.
    pub fn alternative(&mut self, alternative: bool) -> &mut Package {
        self.alternative = alternative;
        self
    }
2019-02-03 04:01:23 +00:00
    /// Adds a file to the package with the default mode (see `DEFAULT_MODE`).
    pub fn file(&mut self, name: &str, contents: &str) -> &mut Package {
        self.file_with_mode(name, DEFAULT_MODE, contents)
    }
/// Adds a file with a specific Unix mode.
pub fn file_with_mode(&mut self, path: &str, mode: u32, contents: &str) -> &mut Package {
self.files.push(PackageFile {
path: path.to_string(),
2022-08-26 00:12:25 +00:00
contents: EntryData::Regular(contents.into()),
mode,
extra: false,
});
self
}
2022-08-26 00:12:25 +00:00
/// Adds a symlink to a path to the package.
pub fn symlink(&mut self, dst: &str, src: &str) -> &mut Package {
self.files.push(PackageFile {
path: dst.to_string(),
contents: EntryData::Symlink(src.into()),
mode: DEFAULT_MODE,
extra: false,
});
self
}
2019-02-03 04:01:23 +00:00
/// Adds an "extra" file that is not rooted within the package.
///
/// Normal files are automatically placed within a directory named
/// `$PACKAGE-$VERSION`. This allows you to override that behavior,
/// typically for testing invalid behavior.
pub fn extra_file(&mut self, path: &str, contents: &str) -> &mut Package {
self.files.push(PackageFile {
path: path.to_string(),
2022-08-26 00:12:25 +00:00
contents: EntryData::Regular(contents.to_string()),
mode: DEFAULT_MODE,
extra: true,
});
self
}
2019-02-03 04:01:23 +00:00
    /// Adds a normal dependency. Example:
    /// ```
    /// [dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(&Dependency::new(name, vers))
    }
2019-02-03 04:01:23 +00:00
    /// Adds a dependency with the given feature. Example:
    /// ```
    /// [dependencies]
    /// foo = {version = "1.0", "features": ["feat1", "feat2"]}
    /// ```
    pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).enable_features(features))
    }
2019-02-03 04:01:23 +00:00
    /// Adds a platform-specific dependency. Example:
    /// ```
    /// [target.'cfg(windows)'.dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).target(target))
    }
2019-02-03 04:01:23 +00:00
    /// Adds a dependency to the alternative registry (named "alternative").
    pub fn registry_dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).registry("alternative"))
    }
2019-02-03 04:01:23 +00:00
    /// Adds a dev-dependency. Example:
    /// ```
    /// [dev-dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).dev())
    }
2019-02-03 04:01:23 +00:00
    /// Adds a build-dependency. Example:
    /// ```
    /// [build-dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn build_dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).build())
    }
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
    /// Adds an already-constructed `Dependency`; the generic entry point the
    /// convenience helpers (`dep`, `dev_dep`, `build_dep`, …) delegate to.
    pub fn add_dep(&mut self, dep: &Dependency) -> &mut Package {
        self.deps.push(dep.clone());
        self
    }
2019-02-03 04:01:23 +00:00
    /// Specifies whether or not the package is "yanked".
    pub fn yanked(&mut self, yanked: bool) -> &mut Package {
        self.yanked = yanked;
        self
    }
    /// Specifies whether or not this is a proc macro.
    pub fn proc_macro(&mut self, proc_macro: bool) -> &mut Package {
        self.proc_macro = proc_macro;
        self
    }
2019-02-03 04:01:23 +00:00
/// Adds an entry in the `[features]` section.
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
pub fn feature(&mut self, name: &str, deps: &[&str]) -> &mut Package {
let deps = deps.iter().map(|s| s.to_string()).collect();
self.features.insert(name.to_string(), deps);
self
}
    /// Specify a minimal Rust version (the `rust-version` manifest field).
    pub fn rust_version(&mut self, rust_version: &str) -> &mut Package {
        self.rust_version = Some(rust_version.into());
        self
    }
    /// Causes the JSON line emitted in the index to be invalid, presumably
    /// causing Cargo to skip over this version.
    pub fn invalid_json(&mut self, invalid: bool) -> &mut Package {
        self.invalid_json = invalid;
        self
    }
2020-05-22 15:38:40 +00:00
    /// Sets the `links` manifest field for this package.
    pub fn links(&mut self, links: &str) -> &mut Package {
        self.links = Some(links.to_string());
        self
    }
    /// Adds an entry to the `cargo-features` list in the manifest.
    pub fn cargo_feature(&mut self, feature: &str) -> &mut Package {
        self.cargo_features.push(feature.to_owned());
        self
    }
2021-02-10 18:58:07 +00:00
    /// Sets the index schema version (the `v` field) for this package.
    ///
    /// See `cargo::sources::registry::RegistryPackage` for more information.
    pub fn schema_version(&mut self, version: u32) -> &mut Package {
        self.v = Some(version);
        self
    }
2019-02-03 04:01:23 +00:00
/// Creates the package and place it in the registry.
///
/// This does not actually use Cargo's publishing system, but instead
/// manually creates the entry in the registry on the filesystem.
///
/// Returns the checksum for the package.
2016-07-05 17:28:51 +00:00
pub fn publish(&self) -> String {
self.make_archive();
2019-02-03 04:01:23 +00:00
// Figure out what we're going to write into the index.
let deps = self
.deps
2018-03-14 15:17:44 +00:00
.iter()
.map(|dep| {
// In the index, the `registry` is null if it is from the same registry.
// In Cargo.toml, it is None if it is from crates.io.
2020-03-16 01:51:38 +00:00
let registry_url = match (self.alternative, dep.registry.as_deref()) {
(false, None) => None,
(false, Some("alternative")) => Some(alt_registry_url().to_string()),
(true, None) => {
Some("https://github.com/rust-lang/crates.io-index".to_string())
}
2020-03-16 01:51:38 +00:00
(true, Some("alternative")) => None,
_ => panic!("registry_dep currently only supports `alternative`"),
};
serde_json::json!({
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
"name": dep.name,
"req": dep.vers,
"features": dep.features,
"default_features": true,
"target": dep.target,
add support for artifact dependencies (#9096) Tracking issue: https://github.com/rust-lang/cargo/issues/9096 Original PR: https://github.com/rust-lang/cargo/pull/9992 Add 'bindeps' -Z flag for later use A test to validate artifact dependencies aren't currently parsed. Parse 'artifact' and 'lib' fields. Note that this isn't behind a feature toggle so 'unused' messages will disappear. Transfer artifact dependencies from toml- into manifest-dependencies There are a few premises governing the operation. - if unstable features are not set, warn when 'artifact' or 'lib' is encountered. - bail if 'lib' is encountered alone, but warn that this WOULD happen with nightly. - artifact parsing checks for all invariants, but some aren't tested. Assure serialization of 'artifact' and 'lib' fields produces suitable values during publishing This should be the only place were these fields matter and where a cargo manifest is actually produced. These are only for internal use, no user is typically going to see or edit them. Place all artifact dependency tests inta their own module This facilitates deduplication later and possibly redistribution into other modules if there is a better fit. Represent artifacts that are rust libraries as another ArtifactKind This is more consistent and probably simpler for later use. No need to reflect the TOML data structure. Add tests to assure only 'lib = true' artifact deps are documented RFC-3028 doesn't talk about documentation, but for lib=true it's clear what the desired behaviour should be. If an artifact isn't a library though, then for now, it's transparent, maybe. Many more tests, more documentation, mild `Artifact` refactor The latter seems to be a better fit for what being an artifact really means within cargo, as it literally turns being a library on or off, and thus only optionally becoming a normal library. 
refactor to prepare for artifact related checks Don't show a no-lib warning for artifact dependencies (with lib = false) Tests for more artifact dependency invariants These are merely a proof of concept to show that we are not in a position to actually figure out everything about artifacts right after resolution. However, the error message looks more like a fatal error and less like something that can happen with a more elaborate error message with causes. This might show that these kind of checks might be better done later right before trying to use the information for create compile units. Validate that artifact deps with lib=true still trigger no-lib warnings This triggers the same warning as before, for now without any customization to indicate it's an artifact dependency. Use warnings instead of errors ------------------------------ This avoids the kind of harsh end of compilation in favor of something that can be recovered from. Since warnings are annoying, users will probably avoid re-declaring artifact dependencies. Hook in artifact dependencies into build script runs Even though we would still have to see what happens if they have a lib as well. Is it built twice? Also ---- - fly-by refactor: fix typo; use ? in method returning option - Propagate artifact information into Units; put artifacts into place This means artifacts now have their own place in the 'artifact' directory and uplifts won't happen for them. - refactor and fix cippy suggestion - fix build after rebasing onto master Create directories when executing the job, and not when preparing it. also: Get CI to work on windows the easy way, for now. Set directories for artifact dependencies in build script runtimes Test remaining kinds of build-script runtime environment variables Also ---- - Fix windows tests, the quick way. 
- Try to fix windows assertions, and generalize them - Fix second test for windows, hopefully test for available library dependency in build scripts with lib = true probably generally exclude all artifact dependencies with lib=false. Pass renamed dep names along with unit deps to allow proper artifact env names Test for selective bin:<name> syntax, as well as binaries with dashes Test to assure dependency names are transformed correctly assure advertised binaries and directories are actually present This wouldn't be the case if dependencies are not setup correctly, for instance. Also ---- - make it easier to see actual values even on failure This should help figure out why on CI something fails that works locally no matter what. Turns out this is a race condition, with my machine being on the good side of it so it doesn't show in testing. Fortunately it still can be reproduced and easily tested for. - refactor test; the race condition is still present though - Force CI to pass here by avoiding checks triggering race. - Fix windows build, maybe? More tolerant is_file() checks to account for delay on CI This _should_ help CI to test for the presence which is better than not testing at all. This appears to be needed as the output file isn't ready/present in time for some reason. The root cause of this issue is unknown, but it's definitely a race as it rarely happens locally. When it happened, the file was always present after the run. Now we will learn if it is truly not present, ever, or if it's maybe something very else. Validate libs also don't see artifact dependencies as libraries with lib=false Also ---- - Add prelimiary test for validating build-time artifacts - Try to fix CI on gnu windows Which apparently generates paths similar to linux, but with .exe suffix. The current linux patterns should match that. 
- refactor Help sharing code across modules allow rustc to use artifact dep environment variables, but… …it needs some adjustments to actually setup the unit dependency graph with artifacts as well. Right now it will only setup dependencies for artifacts that are libs, but not the artifacts themselves, completely ignoring them when they are not libs. Make artifact dependencies available in main loop This is the commit message #2: ------------------------------ rough cut of support for artifact dependencies at build time… …which unfortunately already shows that the binary it is supposed to include is reproducibly not ready in time even though the path is correct and it's present right after the run. Could it be related to rmeta? This is the commit message #3: ------------------------------ Fix test expectations as failure is typical than the warning we had before… …and add some tolerance to existing test to avoid occasional failures. This doesn't change the issue that it also doens't work at all for libraries, which is nicely reproducable and hopefully helps to fix this issue. This is the commit message #4: ------------------------------ Probably the fix for the dependency issue in the scheduler This means that bin() targets are now properly added to the job graph to cause proper syncing, whereas previously apparently it would still schedule binaries, but somehow consider them rmeta and thus start their dependents too early, leading to races. This is the commit message #5: ------------------------------ Don't accidentally include non-gnu windows tests in gnu windows. Support cargo doc and cargo check The major changes here are… - always compile artifacts in build mode, as we literally want the build output, always, which the dependent might rely on being present. - share code between the rather similar looking paths for rustdoc and rustc. 
Make artifact messages appear more in line with cargo by using backticks Also: Add first test for static lib support in build scripts build-scripts with support for cdylib and staticlib - Fix windows msvc build No need to speculate why the staticlib has hashes in the name even though nothing else. staticlib and cdylib support for libraries test staticlib and cdylibs for rustdoc as well. Also catch a seemingly untested special case/warning about the lack of linkable items, which probably shouldn't be an issue for artifacts as they are not linkable in the traditional sense. more useful test for 'cargo check' `cargo check` isn't used very consistently in tests, so when we use it we should be sure to actually try to use an artifact based feature to gain some coverage. verify that multiple versions are allowed for artifact deps as well. also: remove redundant test This is the commit message #2: ------------------------------ Properly choose which dependencies take part in artifact handling Previously it would include them very generously without considering the compatible dependency types. This is the commit message #3: ------------------------------ a more complex test which includes dev-dependencies It also shows that doc-tests don't yet work as rustdoc is run outside of the system into which we integrate right now. It should be possible to write our environment variable configuration in terms of this 'finished compilation' though, hopefully with most code reused. This is the commit message #4: ------------------------------ A first stab at storing artifact environment variables for packages… …however, it seems like the key for this isn't necessarily correct under all circumstances. Maybe it should be something more specific, don't know. This is the commit message #5: ------------------------------ Adjust key for identifying units to Metadata This one is actually unique and feels much better. 
This is the commit message #6: ------------------------------ Attempt to make use of artifact environment information… …but fail as the metadata won't match as the doctest unit is, of course, its separate unit. Now I wonder if its possible to find the artifact units in question that have the metadata. Properly use metadata to use artifact environment variables in doctests This is the commit message #2: ------------------------------ Add test for resolver = "2" and build dependencies Interestingly the 'host-features' flag must be set (as is seemingly documented in the flags documentation as well), even though I am not quite sure if this is the 100% correct solution. Should it rather have an entry with this flag being false in its map? Probably not… but I am not quite certain. This is the commit message #3: ------------------------------ set most if not all tests to use resolver = "2" This allows to keep it working with the most recent version while allowing to quickly test with "1" as well (which thus far was working fine). All tests I could imagine (excluding target and profiles) are working now Crossplatform tests now run on architecture aarm64 as well. More stringent negative testing Fix incorrect handling of dependency directory computation Previously it would just 'hack' the deps-dir to become something very different for artifacts. This could easily be fixed by putting the logic for artifact output directories into the right spot. A test for cargo-tree to indicate artifacts aren't handled specifically Assure build-scripts can't access artifacts at build time Actual doc-tests with access to artifact env vars All relevant parsing of `target = [..]` Next step is to actually take it into consideration. 
A failing test for adjusting the target for build script artifacts using --target Check for unknown artifact target triple in a place that exists for a year The first test showing that `target="target"` deps seemingly work For now only tested for build scripts, but it won't be much different for non-build dependencies. build scripts accept custom targets unconditionally Support target setting for non-build dependencies This is the commit message #2: ------------------------------ Add doc-test cross compile related test Even though there is no artifact code specific to doc testing, it's worth to try testing it with different target settings to validate it still works despite doc tests having some special caseing around target settings. This is the commit message #3: ------------------------------ A test to validate profiles work as expected for build-deps and non-build deps No change is required to make this work and artifact dependencies 'just work' based on the typical rules of their non-artifact counterarts. This is the commit message #4: ------------------------------ Adjust `cargo metadata` to deal with artifact dependencies This commit was squashed and there is probably more that changed. This is the commit message #5: ------------------------------ Show bin-only artifacts in "resolve" of metadata as well. This is the commit message #6: ------------------------------ minor refactoring during research for RFC-3176 This will soon need to return multiple extern-name/dep-name pairs. This is the commit message #7: ------------------------------ See if opt-level 3 works on win-msvc in basic profile test for artifacts This is the same value as is used in the other test of the same name, which certainly runs on windows. This is the commit message #8: ------------------------------ refactor Assure the type for targets reflect that they cannot be the host target, which removes a few unreachable!() expressions. 
Put `root_unit_compile_kind` into `UnitFor` Previously that wasn't done because of the unused `all_values()` method which has now been deleted as its not being used anyomre. This allows for the root unit compile kind to be passed as originally intended, instead of working around the previous lack of extendability of UnitFor due to ::all_values(). This is also the basis for better/correct feature handling once feature resolution can be depending on the artifact target as well, resulting in another extension to UnitFor for that matter. Also ---- - Fix ordering Previously the re-created target_mode was used due to the reordering in code, and who knows what kind of effects that might have (despite the test suite being OK with it). Let's put it back in place. - Deactivate test with filename collision on MSVC until RFC-3176 lands Avoid clashes with binaries called 'artifact' by putting 'artifact/' into './deps/' This commit addresses review comment https://github.com/rust-lang/cargo/pull/9992#discussion_r772939834 Don't rely on operator precedence for boolean operations Now it should be clear that no matter what the first term is, if the unit is an artifact, we should enqueue it. Replace boolean and `/*artifact*/ <bool>` with `IsArtifact::(Yes/No)` fix `doc::doc_lib_false()` test It broke due to major breakage in the way dependencies are calculated. Now we differentiate between deps computation for docs and for building. Avoid testing for doctest cross-compilation message It seems to be present on my machine, but isn't on linux and it's probably better to leave it out entirely and focus on the portions of consecutive output that we want to see at least. A test to validate features are unified across libraries and those in artifact deps in the same target Allow aarch64 MacOS to crosscompile to an easily executable alternative target That way more tests can run locally. 
Support for feature resolution per target The implementation is taken directly from RFC-3176 and notably lacks the 'multidep' part. Doing this definitely has the benefit of making entirely clear 'what is what' and helps to greatly reduce the scope of RFC-3176 when it's rebuilt based on the latest RF-3028, what we are implementing right now. Also ---- - A test which prooves that artifact deps with different target don't have a feature namespace yet - Add a test to validate features are namespaced by target Previously it didn't work because it relies on resolver = "2". - 'cargo metadata' test to see how artifact-deps are presented - Missed an opportunity for using the newly introduced `PackageFeaturesKey` - Use a HashMap to store name->value relations for artifact environment variables This is semantically closer to what's intended. also: Remove a by now misleading comment Prevent resolver crash if `target = "target"` is encountered in non-build dependencies A warning was emitted before, now we also apply a fix. Previously the test didn't fail as it accidentally used the old resolver, which now has been removed. Abort in parsing stage if nightly flag is not set and 'artifact' is used There is no good reason to delay errors to a later stage when code tries to use artifacts via environment variables which are not present. Change wording of warning message into what's expected for an error message remove unnecessary `Result` in `collect()` call Improve logic to warn if dependencie are ignored due to missing libraries The improvement here is to trigger correctly if any dependency of a crate is potentially a library, without having an actual library target as part of the package specification. Due to artifact dependencies it's also possible to have a dependency to the same crate of the same version, hence the package name isn't necessarily a unique name anymore. Now the name of the actual dependency in the toml file is used to alleviate this. 
Various small changes for readability and consistency A failing test to validate artifacts work in published crates as well Originally this should have been a test to see target acquisition works but this more pressing issue surfaced instead. Make artifacts known to the registry data (backwards compatible) Now artifacts are serialized into the registry on publish (at least if this code is actually used in the real crates-io registry) which allows the resolve stage to contain artifact information. This seems to be in line with the idea to provide cargo with all information it needs to do package resolution without downloading the actual manifest. Pick up all artifact targets into target info once resolve data is available Even though this works in the test at hand, it clearly shows there is a cyclic dependency between the resolve and the target data. In theory, one would have to repeat resolution until it settles while avoiding cycles. Maybe there is a better way. Add `bindeps`/artifact dependencies to `unstsable.md` with examples Fix tests Various small improvements Greatly simplify artifact environment propagation to commands Remove all adjustments to cargo-metadata, but leave tests The tests are to record the status quo with the current code when artifact dependencies are present and assure the information is not entirely non-sensical. Revert "Make artifacts known to the registry data (backwards compatible)" This reverts commit adc5f8ad04840af9fd06c964cfcdffb8c30769b0. Ideally we are able to make it work without altering the registry storage format. This could work if information from the package set is added to the resolve information. Enrich resolves information with additional information from downloaded manifests Resolve information comes from the registry, and it's only as rich as needed to know which packages take part in the build. Artifacts, however, don't influence dependency resolution, hence it shouldn't be part of it. 
For artifact information being present nonetheless when it matters, we port it back to the resolve graph where it will be needed later. Collect 'forced-target' information from non-workspace members as well This is needed as these targets aren't present in the registry and thus can't be picked up by traversing non-workspce members. The mechanism used to pick up artifact targets can also be used to pick up these targets. Remove unnecessary adjustment of doc test refactor `State::deps()` to have filter; re-enable accidentally disabled test The initial rebasing started out with a separted `deps_filtered()` method to retain the original capabilities while minimizing the chance for surprises. It turned out that the all changes combined in this PR make heavy use of filtering capabilities to the point where `deps(<without filter>)` was unused. This suggested that it's required to keep it as is without a way to inline portions of it. For the original change that triggered this rebase, see bd45ac81ba062a7daa3b0178dfcb6fd5759a943c The fix originally made was reapplied by allowing to re-use the required filter, but without inlining it. Always error on invalid artifact setup, with or without enabled bindeps feature Clarify how critical resolver code around artifact is working Remove workaround in favor of deferring a proper implementation See https://github.com/rust-lang/cargo/pull/9992#issuecomment-1033394197 for reference and the TODO in the ignored test for more information. truncate comments at 80-90c; cleanup - remove unused method - remove '-Z unstable-options' - improve error message - improve the way MSVC special cases are targetted in tests - improve how executables are found on non MSVC Avoid depending on output of rustc There is cyclic dependency between rustc and cargo which makes it impossible to adjust cargo's expectations on rustc without leaving broken commits in rustc and cargo. 
Add missing documentation fix incorrect removal of non-artifact libs This is also the first step towards cleaning up the filtering logic which is still making some logic harder to understand than needs be. The goal is to get it to be closer to what's currently on master. Another test was added to have more safety regarding the overall library inclusion logic. inline `build_artifact_requirements_to_units()` Simplify filtering This adds a default filter to `state.deps(…)` making it similar to what's currently in master, while creating another version of it to allow setting a custom filter. This is needed as the default filter won't allow build dependencies, which we need in this particular case. `calc_artifact_deps(…)` now hard-codes the default filter which is needed due to the use of `any` here: https://github.com/rust-lang/cargo/blob/c0e6abe384c2c6282bdd631e2f2a3b092043e6c6/src/cargo/core/compiler/unit_dependencies.rs#L1119 . Simplify filtering.
2021-10-21 09:57:23 +00:00
"artifact": dep.artifact,
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
"optional": dep.optional,
"kind": dep.kind,
"registry": registry_url,
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
"package": dep.package,
})
2018-12-08 11:19:47 +00:00
})
.collect::<Vec<_>>();
let cksum = {
let c = t!(fs::read(&self.archive_dst()));
cksum(&c)
};
let name = if self.invalid_json {
serde_json::json!(1)
} else {
serde_json::json!(self.name)
};
let line = create_index_line(
name,
&self.vers,
deps,
&cksum,
self.features.clone(),
self.yanked,
self.links.clone(),
self.v,
);
2018-03-14 15:17:44 +00:00
let registry_path = if self.alternative {
alt_registry_path()
} else {
registry_path()
};
write_to_index(&registry_path, &self.name, line, self.local);
2016-07-05 17:28:51 +00:00
cksum
}
fn make_archive(&self) {
let dst = self.archive_dst();
t!(fs::create_dir_all(dst.parent().unwrap()));
let f = t!(File::create(&dst));
let mut a = Builder::new(GzEncoder::new(f, Compression::default()));
if !self
.files
.iter()
.any(|PackageFile { path, .. }| path == "Cargo.toml")
{
self.append_manifest(&mut a);
}
if self.files.is_empty() {
2022-09-14 14:14:11 +00:00
self.append(
&mut a,
"src/lib.rs",
DEFAULT_MODE,
&EntryData::Regular("".into()),
);
} else {
for PackageFile {
path,
contents,
mode,
extra,
} in &self.files
{
if *extra {
self.append_raw(&mut a, path, *mode, contents);
} else {
self.append(&mut a, path, *mode, contents);
}
}
}
}
fn append_manifest<W: Write>(&self, ar: &mut Builder<W>) {
let mut manifest = String::new();
if !self.cargo_features.is_empty() {
manifest.push_str(&format!(
"cargo-features = {}\n\n",
toml_edit::ser::to_item(&self.cargo_features).unwrap()
));
}
manifest.push_str(&format!(
2018-03-14 15:17:44 +00:00
r#"
[package]
name = "{}"
version = "{}"
authors = []
2018-03-14 15:17:44 +00:00
"#,
self.name, self.vers
));
if let Some(version) = &self.rust_version {
manifest.push_str(&format!("rust-version = \"{}\"", version));
}
for dep in self.deps.iter() {
let target = match dep.target {
None => String::new(),
Some(ref s) => format!("target.'{}'.", s),
};
let kind = match &dep.kind[..] {
"build" => "build-",
"dev" => "dev-",
2018-03-14 15:17:44 +00:00
_ => "",
};
2018-03-14 15:17:44 +00:00
manifest.push_str(&format!(
r#"
[{}{}dependencies.{}]
version = "{}"
2018-03-14 15:17:44 +00:00
"#,
target, kind, dep.name, dep.vers
));
add support for artifact dependencies (#9096) Tracking issue: https://github.com/rust-lang/cargo/issues/9096 Original PR: https://github.com/rust-lang/cargo/pull/9992 Add 'bindeps' -Z flag for later use A test to validate artifact dependencies aren't currently parsed. Parse 'artifact' and 'lib' fields. Note that this isn't behind a feature toggle so 'unused' messages will disappear. Transfer artifact dependencies from toml- into manifest-dependencies There are a few premises governing the operation. - if unstable features are not set, warn when 'artifact' or 'lib' is encountered. - bail if 'lib' is encountered alone, but warn that this WOULD happen with nightly. - artifact parsing checks for all invariants, but some aren't tested. Assure serialization of 'artifact' and 'lib' fields produces suitable values during publishing This should be the only place were these fields matter and where a cargo manifest is actually produced. These are only for internal use, no user is typically going to see or edit them. Place all artifact dependency tests inta their own module This facilitates deduplication later and possibly redistribution into other modules if there is a better fit. Represent artifacts that are rust libraries as another ArtifactKind This is more consistent and probably simpler for later use. No need to reflect the TOML data structure. Add tests to assure only 'lib = true' artifact deps are documented RFC-3028 doesn't talk about documentation, but for lib=true it's clear what the desired behaviour should be. If an artifact isn't a library though, then for now, it's transparent, maybe. Many more tests, more documentation, mild `Artifact` refactor The latter seems to be a better fit for what being an artifact really means within cargo, as it literally turns being a library on or off, and thus only optionally becoming a normal library. 
refactor to prepare for artifact related checks Don't show a no-lib warning for artifact dependencies (with lib = false) Tests for more artifact dependency invariants These are merely a proof of concept to show that we are not in a position to actually figure out everything about artifacts right after resolution. However, the error message looks more like a fatal error and less like something that can happen with a more elaborate error message with causes. This might show that these kind of checks might be better done later right before trying to use the information for create compile units. Validate that artifact deps with lib=true still trigger no-lib warnings This triggers the same warning as before, for now without any customization to indicate it's an artifact dependency. Use warnings instead of errors ------------------------------ This avoids the kind of harsh end of compilation in favor of something that can be recovered from. Since warnings are annoying, users will probably avoid re-declaring artifact dependencies. Hook in artifact dependencies into build script runs Even though we would still have to see what happens if they have a lib as well. Is it built twice? Also ---- - fly-by refactor: fix typo; use ? in method returning option - Propagate artifact information into Units; put artifacts into place This means artifacts now have their own place in the 'artifact' directory and uplifts won't happen for them. - refactor and fix cippy suggestion - fix build after rebasing onto master Create directories when executing the job, and not when preparing it. also: Get CI to work on windows the easy way, for now. Set directories for artifact dependencies in build script runtimes Test remaining kinds of build-script runtime environment variables Also ---- - Fix windows tests, the quick way. 
- Try to fix windows assertions, and generalize them - Fix second test for windows, hopefully test for available library dependency in build scripts with lib = true probably generally exclude all artifact dependencies with lib=false. Pass renamed dep names along with unit deps to allow proper artifact env names Test for selective bin:<name> syntax, as well as binaries with dashes Test to assure dependency names are transformed correctly assure advertised binaries and directories are actually present This wouldn't be the case if dependencies are not setup correctly, for instance. Also ---- - make it easier to see actual values even on failure This should help figure out why on CI something fails that works locally no matter what. Turns out this is a race condition, with my machine being on the good side of it so it doesn't show in testing. Fortunately it still can be reproduced and easily tested for. - refactor test; the race condition is still present though - Force CI to pass here by avoiding checks triggering race. - Fix windows build, maybe? More tolerant is_file() checks to account for delay on CI This _should_ help CI to test for the presence which is better than not testing at all. This appears to be needed as the output file isn't ready/present in time for some reason. The root cause of this issue is unknown, but it's definitely a race as it rarely happens locally. When it happened, the file was always present after the run. Now we will learn if it is truly not present, ever, or if it's maybe something very else. Validate libs also don't see artifact dependencies as libraries with lib=false Also ---- - Add prelimiary test for validating build-time artifacts - Try to fix CI on gnu windows Which apparently generates paths similar to linux, but with .exe suffix. The current linux patterns should match that. 
- refactor Help sharing code across modules allow rustc to use artifact dep environment variables, but… …it needs some adjustments to actually setup the unit dependency graph with artifacts as well. Right now it will only setup dependencies for artifacts that are libs, but not the artifacts themselves, completely ignoring them when they are not libs. Make artifact dependencies available in main loop This is the commit message #2: ------------------------------ rough cut of support for artifact dependencies at build time… …which unfortunately already shows that the binary it is supposed to include is reproducibly not ready in time even though the path is correct and it's present right after the run. Could it be related to rmeta? This is the commit message #3: ------------------------------ Fix test expectations as failure is typical than the warning we had before… …and add some tolerance to existing test to avoid occasional failures. This doesn't change the issue that it also doens't work at all for libraries, which is nicely reproducable and hopefully helps to fix this issue. This is the commit message #4: ------------------------------ Probably the fix for the dependency issue in the scheduler This means that bin() targets are now properly added to the job graph to cause proper syncing, whereas previously apparently it would still schedule binaries, but somehow consider them rmeta and thus start their dependents too early, leading to races. This is the commit message #5: ------------------------------ Don't accidentally include non-gnu windows tests in gnu windows. Support cargo doc and cargo check The major changes here are… - always compile artifacts in build mode, as we literally want the build output, always, which the dependent might rely on being present. - share code between the rather similar looking paths for rustdoc and rustc. 
Make artifact messages appear more in line with cargo by using backticks Also: Add first test for static lib support in build scripts build-scripts with support for cdylib and staticlib - Fix windows msvc build No need to speculate why the staticlib has hashes in the name even though nothing else. staticlib and cdylib support for libraries test staticlib and cdylibs for rustdoc as well. Also catch a seemingly untested special case/warning about the lack of linkable items, which probably shouldn't be an issue for artifacts as they are not linkable in the traditional sense. more useful test for 'cargo check' `cargo check` isn't used very consistently in tests, so when we use it we should be sure to actually try to use an artifact based feature to gain some coverage. verify that multiple versions are allowed for artifact deps as well. also: remove redundant test This is the commit message #2: ------------------------------ Properly choose which dependencies take part in artifact handling Previously it would include them very generously without considering the compatible dependency types. This is the commit message #3: ------------------------------ a more complex test which includes dev-dependencies It also shows that doc-tests don't yet work as rustdoc is run outside of the system into which we integrate right now. It should be possible to write our environment variable configuration in terms of this 'finished compilation' though, hopefully with most code reused. This is the commit message #4: ------------------------------ A first stab at storing artifact environment variables for packages… …however, it seems like the key for this isn't necessarily correct under all circumstances. Maybe it should be something more specific, don't know. This is the commit message #5: ------------------------------ Adjust key for identifying units to Metadata This one is actually unique and feels much better. 
This is the commit message #6: ------------------------------ Attempt to make use of artifact environment information… …but fail as the metadata won't match as the doctest unit is, of course, its separate unit. Now I wonder if its possible to find the artifact units in question that have the metadata. Properly use metadata to use artifact environment variables in doctests This is the commit message #2: ------------------------------ Add test for resolver = "2" and build dependencies Interestingly the 'host-features' flag must be set (as is seemingly documented in the flags documentation as well), even though I am not quite sure if this is the 100% correct solution. Should it rather have an entry with this flag being false in its map? Probably not… but I am not quite certain. This is the commit message #3: ------------------------------ set most if not all tests to use resolver = "2" This allows to keep it working with the most recent version while allowing to quickly test with "1" as well (which thus far was working fine). All tests I could imagine (excluding target and profiles) are working now Crossplatform tests now run on architecture aarm64 as well. More stringent negative testing Fix incorrect handling of dependency directory computation Previously it would just 'hack' the deps-dir to become something very different for artifacts. This could easily be fixed by putting the logic for artifact output directories into the right spot. A test for cargo-tree to indicate artifacts aren't handled specifically Assure build-scripts can't access artifacts at build time Actual doc-tests with access to artifact env vars All relevant parsing of `target = [..]` Next step is to actually take it into consideration. 
A failing test for adjusting the target for build script artifacts using --target Check for unknown artifact target triple in a place that exists for a year The first test showing that `target="target"` deps seemingly work For now only tested for build scripts, but it won't be much different for non-build dependencies. build scripts accept custom targets unconditionally Support target setting for non-build dependencies This is the commit message #2: ------------------------------ Add doc-test cross compile related test Even though there is no artifact code specific to doc testing, it's worth to try testing it with different target settings to validate it still works despite doc tests having some special caseing around target settings. This is the commit message #3: ------------------------------ A test to validate profiles work as expected for build-deps and non-build deps No change is required to make this work and artifact dependencies 'just work' based on the typical rules of their non-artifact counterarts. This is the commit message #4: ------------------------------ Adjust `cargo metadata` to deal with artifact dependencies This commit was squashed and there is probably more that changed. This is the commit message #5: ------------------------------ Show bin-only artifacts in "resolve" of metadata as well. This is the commit message #6: ------------------------------ minor refactoring during research for RFC-3176 This will soon need to return multiple extern-name/dep-name pairs. This is the commit message #7: ------------------------------ See if opt-level 3 works on win-msvc in basic profile test for artifacts This is the same value as is used in the other test of the same name, which certainly runs on windows. This is the commit message #8: ------------------------------ refactor Assure the type for targets reflect that they cannot be the host target, which removes a few unreachable!() expressions. 
Put `root_unit_compile_kind` into `UnitFor` Previously that wasn't done because of the unused `all_values()` method which has now been deleted as its not being used anyomre. This allows for the root unit compile kind to be passed as originally intended, instead of working around the previous lack of extendability of UnitFor due to ::all_values(). This is also the basis for better/correct feature handling once feature resolution can be depending on the artifact target as well, resulting in another extension to UnitFor for that matter. Also ---- - Fix ordering Previously the re-created target_mode was used due to the reordering in code, and who knows what kind of effects that might have (despite the test suite being OK with it). Let's put it back in place. - Deactivate test with filename collision on MSVC until RFC-3176 lands Avoid clashes with binaries called 'artifact' by putting 'artifact/' into './deps/' This commit addresses review comment https://github.com/rust-lang/cargo/pull/9992#discussion_r772939834 Don't rely on operator precedence for boolean operations Now it should be clear that no matter what the first term is, if the unit is an artifact, we should enqueue it. Replace boolean and `/*artifact*/ <bool>` with `IsArtifact::(Yes/No)` fix `doc::doc_lib_false()` test It broke due to major breakage in the way dependencies are calculated. Now we differentiate between deps computation for docs and for building. Avoid testing for doctest cross-compilation message It seems to be present on my machine, but isn't on linux and it's probably better to leave it out entirely and focus on the portions of consecutive output that we want to see at least. A test to validate features are unified across libraries and those in artifact deps in the same target Allow aarch64 MacOS to crosscompile to an easily executable alternative target That way more tests can run locally. 
Support for feature resolution per target The implementation is taken directly from RFC-3176 and notably lacks the 'multidep' part. Doing this definitely has the benefit of making entirely clear 'what is what' and helps to greatly reduce the scope of RFC-3176 when it's rebuilt based on the latest RF-3028, what we are implementing right now. Also ---- - A test which prooves that artifact deps with different target don't have a feature namespace yet - Add a test to validate features are namespaced by target Previously it didn't work because it relies on resolver = "2". - 'cargo metadata' test to see how artifact-deps are presented - Missed an opportunity for using the newly introduced `PackageFeaturesKey` - Use a HashMap to store name->value relations for artifact environment variables This is semantically closer to what's intended. also: Remove a by now misleading comment Prevent resolver crash if `target = "target"` is encountered in non-build dependencies A warning was emitted before, now we also apply a fix. Previously the test didn't fail as it accidentally used the old resolver, which now has been removed. Abort in parsing stage if nightly flag is not set and 'artifact' is used There is no good reason to delay errors to a later stage when code tries to use artifacts via environment variables which are not present. Change wording of warning message into what's expected for an error message remove unnecessary `Result` in `collect()` call Improve logic to warn if dependencie are ignored due to missing libraries The improvement here is to trigger correctly if any dependency of a crate is potentially a library, without having an actual library target as part of the package specification. Due to artifact dependencies it's also possible to have a dependency to the same crate of the same version, hence the package name isn't necessarily a unique name anymore. Now the name of the actual dependency in the toml file is used to alleviate this. 
Various small changes for readability and consistency A failing test to validate artifacts work in published crates as well Originally this should have been a test to see target acquisition works but this more pressing issue surfaced instead. Make artifacts known to the registry data (backwards compatible) Now artifacts are serialized into the registry on publish (at least if this code is actually used in the real crates-io registry) which allows the resolve stage to contain artifact information. This seems to be in line with the idea to provide cargo with all information it needs to do package resolution without downloading the actual manifest. Pick up all artifact targets into target info once resolve data is available Even though this works in the test at hand, it clearly shows there is a cyclic dependency between the resolve and the target data. In theory, one would have to repeat resolution until it settles while avoiding cycles. Maybe there is a better way. Add `bindeps`/artifact dependencies to `unstsable.md` with examples Fix tests Various small improvements Greatly simplify artifact environment propagation to commands Remove all adjustments to cargo-metadata, but leave tests The tests are to record the status quo with the current code when artifact dependencies are present and assure the information is not entirely non-sensical. Revert "Make artifacts known to the registry data (backwards compatible)" This reverts commit adc5f8ad04840af9fd06c964cfcdffb8c30769b0. Ideally we are able to make it work without altering the registry storage format. This could work if information from the package set is added to the resolve information. Enrich resolves information with additional information from downloaded manifests Resolve information comes from the registry, and it's only as rich as needed to know which packages take part in the build. Artifacts, however, don't influence dependency resolution, hence it shouldn't be part of it. 
For artifact information being present nonetheless when it matters, we port it back to the resolve graph where it will be needed later. Collect 'forced-target' information from non-workspace members as well This is needed as these targets aren't present in the registry and thus can't be picked up by traversing non-workspce members. The mechanism used to pick up artifact targets can also be used to pick up these targets. Remove unnecessary adjustment of doc test refactor `State::deps()` to have filter; re-enable accidentally disabled test The initial rebasing started out with a separted `deps_filtered()` method to retain the original capabilities while minimizing the chance for surprises. It turned out that the all changes combined in this PR make heavy use of filtering capabilities to the point where `deps(<without filter>)` was unused. This suggested that it's required to keep it as is without a way to inline portions of it. For the original change that triggered this rebase, see bd45ac81ba062a7daa3b0178dfcb6fd5759a943c The fix originally made was reapplied by allowing to re-use the required filter, but without inlining it. Always error on invalid artifact setup, with or without enabled bindeps feature Clarify how critical resolver code around artifact is working Remove workaround in favor of deferring a proper implementation See https://github.com/rust-lang/cargo/pull/9992#issuecomment-1033394197 for reference and the TODO in the ignored test for more information. truncate comments at 80-90c; cleanup - remove unused method - remove '-Z unstable-options' - improve error message - improve the way MSVC special cases are targetted in tests - improve how executables are found on non MSVC Avoid depending on output of rustc There is cyclic dependency between rustc and cargo which makes it impossible to adjust cargo's expectations on rustc without leaving broken commits in rustc and cargo. 
Add missing documentation fix incorrect removal of non-artifact libs This is also the first step towards cleaning up the filtering logic which is still making some logic harder to understand than needs be. The goal is to get it to be closer to what's currently on master. Another test was added to have more safety regarding the overall library inclusion logic. inline `build_artifact_requirements_to_units()` Simplify filtering This adds a default filter to `state.deps(…)` making it similar to what's currently in master, while creating another version of it to allow setting a custom filter. This is needed as the default filter won't allow build dependencies, which we need in this particular case. `calc_artifact_deps(…)` now hard-codes the default filter which is needed due to the use of `any` here: https://github.com/rust-lang/cargo/blob/c0e6abe384c2c6282bdd631e2f2a3b092043e6c6/src/cargo/core/compiler/unit_dependencies.rs#L1119 . Simplify filtering.
2021-10-21 09:57:23 +00:00
if let Some((artifact, target)) = &dep.artifact {
manifest.push_str(&format!("artifact = \"{}\"\n", artifact));
if let Some(target) = &target {
manifest.push_str(&format!("target = \"{}\"\n", target))
}
}
if let Some(registry) = &dep.registry {
2018-12-31 00:07:58 +00:00
assert_eq!(registry, "alternative");
manifest.push_str(&format!("registry-index = \"{}\"", alt_registry_url()));
}
}
if self.proc_macro {
manifest.push_str("[lib]\nproc-macro = true\n");
}
2022-09-14 14:14:11 +00:00
self.append(
ar,
"Cargo.toml",
DEFAULT_MODE,
&EntryData::Regular(manifest.into()),
);
}
2022-08-26 00:12:25 +00:00
fn append<W: Write>(&self, ar: &mut Builder<W>, file: &str, mode: u32, contents: &EntryData) {
self.append_raw(
2018-03-14 15:17:44 +00:00
ar,
&format!("{}-{}/{}", self.name, self.vers, file),
mode,
2018-03-14 15:17:44 +00:00
contents,
);
}
2022-09-14 14:14:11 +00:00
fn append_raw<W: Write>(
&self,
ar: &mut Builder<W>,
path: &str,
mode: u32,
contents: &EntryData,
) {
let mut header = Header::new_ustar();
2022-08-26 00:12:25 +00:00
let contents = match contents {
EntryData::Regular(contents) => contents.as_str(),
EntryData::Symlink(src) => {
header.set_entry_type(tar::EntryType::Symlink);
t!(header.set_link_name(src));
"" // Symlink has no contents.
}
};
header.set_size(contents.len() as u64);
t!(header.set_path(path));
header.set_mode(mode);
header.set_cksum();
t!(ar.append(&header, contents.as_bytes()));
}
/// Returns the path to the compressed package file.
pub fn archive_dst(&self) -> PathBuf {
if self.local {
2018-03-14 15:17:44 +00:00
registry_path().join(format!("{}-{}.crate", self.name, self.vers))
} else if self.alternative {
alt_dl_path()
.join(&self.name)
.join(&self.vers)
.join("download")
} else {
dl_path().join(&self.name).join(&self.vers).join("download")
}
}
}
2016-07-05 17:28:51 +00:00
pub fn cksum(s: &[u8]) -> String {
Import the cargo-vendor subcommand into Cargo This commit imports the external [alexcrichton/cargo-vendor repository][repo] into Cargo itself. This means it will no longer be necessary to install the `cargo-vendor` subcommand in order to vendor dependencies. Additionally it'll always support the latest feature set of Cargo as it'll be built into Cargo! All tests were imported as part of this commit, but not all features were imported. Some flags have been left out that were added later in the lifetime of `cargo vendor` which seem like they're more questionable to stabilize. I'm hoping that they can have separate PRs adding their implementation here, and we can make a decision of their stabilization at a later date. The current man page for `cargo vendor -h` will look like: cargo-vendor Vendor all dependencies for a project locally USAGE: cargo vendor [OPTIONS] [--] [path] OPTIONS: -q, --quiet No output printed to stdout --manifest-path <PATH> Path to Cargo.toml --no-delete Don't delete older crates in the vendor directory -s, --sync <TOML>... Additional `Cargo.toml` to sync and vendor --respect-source-config Respect `[source]` config in `.cargo/config` -v, --verbose Use verbose output (-vv very verbose/build.rs output) --color <WHEN> Coloring: auto, always, never --frozen Require Cargo.lock and cache are up to date --locked Require Cargo.lock is up to date -Z <FLAG>... Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details -h, --help Prints help information ARGS: <path> Where to vendor crates (`vendor` by default) This cargo subcommand will vendor all crates.io and git dependencies for a project into the specified directory at `<path>`. After this command completes the vendor directory specified by `<path>` will contain all remote sources from dependencies specified. Additionally manifest beyond the default one can be specified with the `-s` option. 
The `cargo vendor` command will also print out the configuration necessary to use the vendored sources, which when needed is then encoded into `.cargo/config`. Since this change is not importing 100% of the functionality of the existing `cargo vendor` this change does run a risk of being a breaking change for any folks using such functionality. Executing `cargo vendor` will favor the built-in command rather than an external subcommand, causing unimplemented features to become errors about flag usage. [repo]: https://github.com/alexcrichton/cargo-vendor
2019-04-23 00:54:27 +00:00
Sha256::new().update(s).finish_hex()
}
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
impl Dependency {
pub fn new(name: &str, vers: &str) -> Dependency {
Dependency {
name: name.to_string(),
vers: vers.to_string(),
kind: "normal".to_string(),
add support for artifact dependencies (#9096) Tracking issue: https://github.com/rust-lang/cargo/issues/9096 Original PR: https://github.com/rust-lang/cargo/pull/9992 Add 'bindeps' -Z flag for later use A test to validate artifact dependencies aren't currently parsed. Parse 'artifact' and 'lib' fields. Note that this isn't behind a feature toggle so 'unused' messages will disappear. Transfer artifact dependencies from toml- into manifest-dependencies There are a few premises governing the operation. - if unstable features are not set, warn when 'artifact' or 'lib' is encountered. - bail if 'lib' is encountered alone, but warn that this WOULD happen with nightly. - artifact parsing checks for all invariants, but some aren't tested. Assure serialization of 'artifact' and 'lib' fields produces suitable values during publishing This should be the only place were these fields matter and where a cargo manifest is actually produced. These are only for internal use, no user is typically going to see or edit them. Place all artifact dependency tests inta their own module This facilitates deduplication later and possibly redistribution into other modules if there is a better fit. Represent artifacts that are rust libraries as another ArtifactKind This is more consistent and probably simpler for later use. No need to reflect the TOML data structure. Add tests to assure only 'lib = true' artifact deps are documented RFC-3028 doesn't talk about documentation, but for lib=true it's clear what the desired behaviour should be. If an artifact isn't a library though, then for now, it's transparent, maybe. Many more tests, more documentation, mild `Artifact` refactor The latter seems to be a better fit for what being an artifact really means within cargo, as it literally turns being a library on or off, and thus only optionally becoming a normal library. 
refactor to prepare for artifact related checks Don't show a no-lib warning for artifact dependencies (with lib = false) Tests for more artifact dependency invariants These are merely a proof of concept to show that we are not in a position to actually figure out everything about artifacts right after resolution. However, the error message looks more like a fatal error and less like something that can happen with a more elaborate error message with causes. This might show that these kind of checks might be better done later right before trying to use the information for create compile units. Validate that artifact deps with lib=true still trigger no-lib warnings This triggers the same warning as before, for now without any customization to indicate it's an artifact dependency. Use warnings instead of errors ------------------------------ This avoids the kind of harsh end of compilation in favor of something that can be recovered from. Since warnings are annoying, users will probably avoid re-declaring artifact dependencies. Hook in artifact dependencies into build script runs Even though we would still have to see what happens if they have a lib as well. Is it built twice? Also ---- - fly-by refactor: fix typo; use ? in method returning option - Propagate artifact information into Units; put artifacts into place This means artifacts now have their own place in the 'artifact' directory and uplifts won't happen for them. - refactor and fix cippy suggestion - fix build after rebasing onto master Create directories when executing the job, and not when preparing it. also: Get CI to work on windows the easy way, for now. Set directories for artifact dependencies in build script runtimes Test remaining kinds of build-script runtime environment variables Also ---- - Fix windows tests, the quick way. 
- Try to fix windows assertions, and generalize them - Fix second test for windows, hopefully test for available library dependency in build scripts with lib = true probably generally exclude all artifact dependencies with lib=false. Pass renamed dep names along with unit deps to allow proper artifact env names Test for selective bin:<name> syntax, as well as binaries with dashes Test to assure dependency names are transformed correctly assure advertised binaries and directories are actually present This wouldn't be the case if dependencies are not setup correctly, for instance. Also ---- - make it easier to see actual values even on failure This should help figure out why on CI something fails that works locally no matter what. Turns out this is a race condition, with my machine being on the good side of it so it doesn't show in testing. Fortunately it still can be reproduced and easily tested for. - refactor test; the race condition is still present though - Force CI to pass here by avoiding checks triggering race. - Fix windows build, maybe? More tolerant is_file() checks to account for delay on CI This _should_ help CI to test for the presence which is better than not testing at all. This appears to be needed as the output file isn't ready/present in time for some reason. The root cause of this issue is unknown, but it's definitely a race as it rarely happens locally. When it happened, the file was always present after the run. Now we will learn if it is truly not present, ever, or if it's maybe something very else. Validate libs also don't see artifact dependencies as libraries with lib=false Also ---- - Add prelimiary test for validating build-time artifacts - Try to fix CI on gnu windows Which apparently generates paths similar to linux, but with .exe suffix. The current linux patterns should match that. 
- refactor Help sharing code across modules allow rustc to use artifact dep environment variables, but… …it needs some adjustments to actually setup the unit dependency graph with artifacts as well. Right now it will only setup dependencies for artifacts that are libs, but not the artifacts themselves, completely ignoring them when they are not libs. Make artifact dependencies available in main loop This is the commit message #2: ------------------------------ rough cut of support for artifact dependencies at build time… …which unfortunately already shows that the binary it is supposed to include is reproducibly not ready in time even though the path is correct and it's present right after the run. Could it be related to rmeta? This is the commit message #3: ------------------------------ Fix test expectations as failure is typical than the warning we had before… …and add some tolerance to existing test to avoid occasional failures. This doesn't change the issue that it also doens't work at all for libraries, which is nicely reproducable and hopefully helps to fix this issue. This is the commit message #4: ------------------------------ Probably the fix for the dependency issue in the scheduler This means that bin() targets are now properly added to the job graph to cause proper syncing, whereas previously apparently it would still schedule binaries, but somehow consider them rmeta and thus start their dependents too early, leading to races. This is the commit message #5: ------------------------------ Don't accidentally include non-gnu windows tests in gnu windows. Support cargo doc and cargo check The major changes here are… - always compile artifacts in build mode, as we literally want the build output, always, which the dependent might rely on being present. - share code between the rather similar looking paths for rustdoc and rustc. 
Make artifact messages appear more in line with cargo by using backticks Also: Add first test for static lib support in build scripts build-scripts with support for cdylib and staticlib - Fix windows msvc build No need to speculate why the staticlib has hashes in the name even though nothing else. staticlib and cdylib support for libraries test staticlib and cdylibs for rustdoc as well. Also catch a seemingly untested special case/warning about the lack of linkable items, which probably shouldn't be an issue for artifacts as they are not linkable in the traditional sense. more useful test for 'cargo check' `cargo check` isn't used very consistently in tests, so when we use it we should be sure to actually try to use an artifact based feature to gain some coverage. verify that multiple versions are allowed for artifact deps as well. also: remove redundant test This is the commit message #2: ------------------------------ Properly choose which dependencies take part in artifact handling Previously it would include them very generously without considering the compatible dependency types. This is the commit message #3: ------------------------------ a more complex test which includes dev-dependencies It also shows that doc-tests don't yet work as rustdoc is run outside of the system into which we integrate right now. It should be possible to write our environment variable configuration in terms of this 'finished compilation' though, hopefully with most code reused. This is the commit message #4: ------------------------------ A first stab at storing artifact environment variables for packages… …however, it seems like the key for this isn't necessarily correct under all circumstances. Maybe it should be something more specific, don't know. This is the commit message #5: ------------------------------ Adjust key for identifying units to Metadata This one is actually unique and feels much better. 
This is the commit message #6: ------------------------------ Attempt to make use of artifact environment information… …but fail as the metadata won't match as the doctest unit is, of course, its separate unit. Now I wonder if its possible to find the artifact units in question that have the metadata. Properly use metadata to use artifact environment variables in doctests This is the commit message #2: ------------------------------ Add test for resolver = "2" and build dependencies Interestingly the 'host-features' flag must be set (as is seemingly documented in the flags documentation as well), even though I am not quite sure if this is the 100% correct solution. Should it rather have an entry with this flag being false in its map? Probably not… but I am not quite certain. This is the commit message #3: ------------------------------ set most if not all tests to use resolver = "2" This allows to keep it working with the most recent version while allowing to quickly test with "1" as well (which thus far was working fine). All tests I could imagine (excluding target and profiles) are working now Crossplatform tests now run on architecture aarm64 as well. More stringent negative testing Fix incorrect handling of dependency directory computation Previously it would just 'hack' the deps-dir to become something very different for artifacts. This could easily be fixed by putting the logic for artifact output directories into the right spot. A test for cargo-tree to indicate artifacts aren't handled specifically Assure build-scripts can't access artifacts at build time Actual doc-tests with access to artifact env vars All relevant parsing of `target = [..]` Next step is to actually take it into consideration. 
A failing test for adjusting the target for build script artifacts using --target Check for unknown artifact target triple in a place that exists for a year The first test showing that `target="target"` deps seemingly work For now only tested for build scripts, but it won't be much different for non-build dependencies. build scripts accept custom targets unconditionally Support target setting for non-build dependencies This is the commit message #2: ------------------------------ Add doc-test cross compile related test Even though there is no artifact code specific to doc testing, it's worth to try testing it with different target settings to validate it still works despite doc tests having some special caseing around target settings. This is the commit message #3: ------------------------------ A test to validate profiles work as expected for build-deps and non-build deps No change is required to make this work and artifact dependencies 'just work' based on the typical rules of their non-artifact counterarts. This is the commit message #4: ------------------------------ Adjust `cargo metadata` to deal with artifact dependencies This commit was squashed and there is probably more that changed. This is the commit message #5: ------------------------------ Show bin-only artifacts in "resolve" of metadata as well. This is the commit message #6: ------------------------------ minor refactoring during research for RFC-3176 This will soon need to return multiple extern-name/dep-name pairs. This is the commit message #7: ------------------------------ See if opt-level 3 works on win-msvc in basic profile test for artifacts This is the same value as is used in the other test of the same name, which certainly runs on windows. This is the commit message #8: ------------------------------ refactor Assure the type for targets reflect that they cannot be the host target, which removes a few unreachable!() expressions. 
Put `root_unit_compile_kind` into `UnitFor` Previously that wasn't done because of the unused `all_values()` method which has now been deleted as its not being used anyomre. This allows for the root unit compile kind to be passed as originally intended, instead of working around the previous lack of extendability of UnitFor due to ::all_values(). This is also the basis for better/correct feature handling once feature resolution can be depending on the artifact target as well, resulting in another extension to UnitFor for that matter. Also ---- - Fix ordering Previously the re-created target_mode was used due to the reordering in code, and who knows what kind of effects that might have (despite the test suite being OK with it). Let's put it back in place. - Deactivate test with filename collision on MSVC until RFC-3176 lands Avoid clashes with binaries called 'artifact' by putting 'artifact/' into './deps/' This commit addresses review comment https://github.com/rust-lang/cargo/pull/9992#discussion_r772939834 Don't rely on operator precedence for boolean operations Now it should be clear that no matter what the first term is, if the unit is an artifact, we should enqueue it. Replace boolean and `/*artifact*/ <bool>` with `IsArtifact::(Yes/No)` fix `doc::doc_lib_false()` test It broke due to major breakage in the way dependencies are calculated. Now we differentiate between deps computation for docs and for building. Avoid testing for doctest cross-compilation message It seems to be present on my machine, but isn't on linux and it's probably better to leave it out entirely and focus on the portions of consecutive output that we want to see at least. A test to validate features are unified across libraries and those in artifact deps in the same target Allow aarch64 MacOS to crosscompile to an easily executable alternative target That way more tests can run locally. 
Support for feature resolution per target The implementation is taken directly from RFC-3176 and notably lacks the 'multidep' part. Doing this definitely has the benefit of making entirely clear 'what is what' and helps to greatly reduce the scope of RFC-3176 when it's rebuilt based on the latest RF-3028, what we are implementing right now. Also ---- - A test which prooves that artifact deps with different target don't have a feature namespace yet - Add a test to validate features are namespaced by target Previously it didn't work because it relies on resolver = "2". - 'cargo metadata' test to see how artifact-deps are presented - Missed an opportunity for using the newly introduced `PackageFeaturesKey` - Use a HashMap to store name->value relations for artifact environment variables This is semantically closer to what's intended. also: Remove a by now misleading comment Prevent resolver crash if `target = "target"` is encountered in non-build dependencies A warning was emitted before, now we also apply a fix. Previously the test didn't fail as it accidentally used the old resolver, which now has been removed. Abort in parsing stage if nightly flag is not set and 'artifact' is used There is no good reason to delay errors to a later stage when code tries to use artifacts via environment variables which are not present. Change wording of warning message into what's expected for an error message remove unnecessary `Result` in `collect()` call Improve logic to warn if dependencie are ignored due to missing libraries The improvement here is to trigger correctly if any dependency of a crate is potentially a library, without having an actual library target as part of the package specification. Due to artifact dependencies it's also possible to have a dependency to the same crate of the same version, hence the package name isn't necessarily a unique name anymore. Now the name of the actual dependency in the toml file is used to alleviate this. 
Various small changes for readability and consistency A failing test to validate artifacts work in published crates as well Originally this should have been a test to see target acquisition works but this more pressing issue surfaced instead. Make artifacts known to the registry data (backwards compatible) Now artifacts are serialized into the registry on publish (at least if this code is actually used in the real crates-io registry) which allows the resolve stage to contain artifact information. This seems to be in line with the idea to provide cargo with all information it needs to do package resolution without downloading the actual manifest. Pick up all artifact targets into target info once resolve data is available Even though this works in the test at hand, it clearly shows there is a cyclic dependency between the resolve and the target data. In theory, one would have to repeat resolution until it settles while avoiding cycles. Maybe there is a better way. Add `bindeps`/artifact dependencies to `unstsable.md` with examples Fix tests Various small improvements Greatly simplify artifact environment propagation to commands Remove all adjustments to cargo-metadata, but leave tests The tests are to record the status quo with the current code when artifact dependencies are present and assure the information is not entirely non-sensical. Revert "Make artifacts known to the registry data (backwards compatible)" This reverts commit adc5f8ad04840af9fd06c964cfcdffb8c30769b0. Ideally we are able to make it work without altering the registry storage format. This could work if information from the package set is added to the resolve information. Enrich resolves information with additional information from downloaded manifests Resolve information comes from the registry, and it's only as rich as needed to know which packages take part in the build. Artifacts, however, don't influence dependency resolution, hence it shouldn't be part of it. 
For artifact information being present nonetheless when it matters, we port it back to the resolve graph where it will be needed later. Collect 'forced-target' information from non-workspace members as well This is needed as these targets aren't present in the registry and thus can't be picked up by traversing non-workspce members. The mechanism used to pick up artifact targets can also be used to pick up these targets. Remove unnecessary adjustment of doc test refactor `State::deps()` to have filter; re-enable accidentally disabled test The initial rebasing started out with a separted `deps_filtered()` method to retain the original capabilities while minimizing the chance for surprises. It turned out that the all changes combined in this PR make heavy use of filtering capabilities to the point where `deps(<without filter>)` was unused. This suggested that it's required to keep it as is without a way to inline portions of it. For the original change that triggered this rebase, see bd45ac81ba062a7daa3b0178dfcb6fd5759a943c The fix originally made was reapplied by allowing to re-use the required filter, but without inlining it. Always error on invalid artifact setup, with or without enabled bindeps feature Clarify how critical resolver code around artifact is working Remove workaround in favor of deferring a proper implementation See https://github.com/rust-lang/cargo/pull/9992#issuecomment-1033394197 for reference and the TODO in the ignored test for more information. truncate comments at 80-90c; cleanup - remove unused method - remove '-Z unstable-options' - improve error message - improve the way MSVC special cases are targetted in tests - improve how executables are found on non MSVC Avoid depending on output of rustc There is cyclic dependency between rustc and cargo which makes it impossible to adjust cargo's expectations on rustc without leaving broken commits in rustc and cargo. 
Add missing documentation fix incorrect removal of non-artifact libs This is also the first step towards cleaning up the filtering logic which is still making some logic harder to understand than needs be. The goal is to get it to be closer to what's currently on master. Another test was added to have more safety regarding the overall library inclusion logic. inline `build_artifact_requirements_to_units()` Simplify filtering This adds a default filter to `state.deps(…)` making it similar to what's currently in master, while creating another version of it to allow setting a custom filter. This is needed as the default filter won't allow build dependencies, which we need in this particular case. `calc_artifact_deps(…)` now hard-codes the default filter which is needed due to the use of `any` here: https://github.com/rust-lang/cargo/blob/c0e6abe384c2c6282bdd631e2f2a3b092043e6c6/src/cargo/core/compiler/unit_dependencies.rs#L1119 . Simplify filtering.
2021-10-21 09:57:23 +00:00
artifact: None,
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
target: None,
features: Vec::new(),
package: None,
optional: false,
registry: None,
}
}
2019-02-03 04:01:23 +00:00
/// Changes this to `[build-dependencies]`.
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
pub fn build(&mut self) -> &mut Self {
self.kind = "build".to_string();
self
}
2019-02-03 04:01:23 +00:00
/// Changes this to `[dev-dependencies]`.
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
pub fn dev(&mut self) -> &mut Self {
self.kind = "dev".to_string();
self
}
2019-02-03 04:01:23 +00:00
/// Changes this to `[target.$target.dependencies]`.
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
pub fn target(&mut self, target: &str) -> &mut Self {
self.target = Some(target.to_string());
self
}
add support for artifact dependencies (#9096) Tracking issue: https://github.com/rust-lang/cargo/issues/9096 Original PR: https://github.com/rust-lang/cargo/pull/9992 Add 'bindeps' -Z flag for later use A test to validate artifact dependencies aren't currently parsed. Parse 'artifact' and 'lib' fields. Note that this isn't behind a feature toggle so 'unused' messages will disappear. Transfer artifact dependencies from toml- into manifest-dependencies There are a few premises governing the operation. - if unstable features are not set, warn when 'artifact' or 'lib' is encountered. - bail if 'lib' is encountered alone, but warn that this WOULD happen with nightly. - artifact parsing checks for all invariants, but some aren't tested. Assure serialization of 'artifact' and 'lib' fields produces suitable values during publishing This should be the only place were these fields matter and where a cargo manifest is actually produced. These are only for internal use, no user is typically going to see or edit them. Place all artifact dependency tests inta their own module This facilitates deduplication later and possibly redistribution into other modules if there is a better fit. Represent artifacts that are rust libraries as another ArtifactKind This is more consistent and probably simpler for later use. No need to reflect the TOML data structure. Add tests to assure only 'lib = true' artifact deps are documented RFC-3028 doesn't talk about documentation, but for lib=true it's clear what the desired behaviour should be. If an artifact isn't a library though, then for now, it's transparent, maybe. Many more tests, more documentation, mild `Artifact` refactor The latter seems to be a better fit for what being an artifact really means within cargo, as it literally turns being a library on or off, and thus only optionally becoming a normal library. 
refactor to prepare for artifact related checks Don't show a no-lib warning for artifact dependencies (with lib = false) Tests for more artifact dependency invariants These are merely a proof of concept to show that we are not in a position to actually figure out everything about artifacts right after resolution. However, the error message looks more like a fatal error and less like something that can happen with a more elaborate error message with causes. This might show that these kind of checks might be better done later right before trying to use the information for create compile units. Validate that artifact deps with lib=true still trigger no-lib warnings This triggers the same warning as before, for now without any customization to indicate it's an artifact dependency. Use warnings instead of errors ------------------------------ This avoids the kind of harsh end of compilation in favor of something that can be recovered from. Since warnings are annoying, users will probably avoid re-declaring artifact dependencies. Hook in artifact dependencies into build script runs Even though we would still have to see what happens if they have a lib as well. Is it built twice? Also ---- - fly-by refactor: fix typo; use ? in method returning option - Propagate artifact information into Units; put artifacts into place This means artifacts now have their own place in the 'artifact' directory and uplifts won't happen for them. - refactor and fix cippy suggestion - fix build after rebasing onto master Create directories when executing the job, and not when preparing it. also: Get CI to work on windows the easy way, for now. Set directories for artifact dependencies in build script runtimes Test remaining kinds of build-script runtime environment variables Also ---- - Fix windows tests, the quick way. 
- Try to fix windows assertions, and generalize them - Fix second test for windows, hopefully test for available library dependency in build scripts with lib = true probably generally exclude all artifact dependencies with lib=false. Pass renamed dep names along with unit deps to allow proper artifact env names Test for selective bin:<name> syntax, as well as binaries with dashes Test to assure dependency names are transformed correctly assure advertised binaries and directories are actually present This wouldn't be the case if dependencies are not setup correctly, for instance. Also ---- - make it easier to see actual values even on failure This should help figure out why on CI something fails that works locally no matter what. Turns out this is a race condition, with my machine being on the good side of it so it doesn't show in testing. Fortunately it still can be reproduced and easily tested for. - refactor test; the race condition is still present though - Force CI to pass here by avoiding checks triggering race. - Fix windows build, maybe? More tolerant is_file() checks to account for delay on CI This _should_ help CI to test for the presence which is better than not testing at all. This appears to be needed as the output file isn't ready/present in time for some reason. The root cause of this issue is unknown, but it's definitely a race as it rarely happens locally. When it happened, the file was always present after the run. Now we will learn if it is truly not present, ever, or if it's maybe something very else. Validate libs also don't see artifact dependencies as libraries with lib=false Also ---- - Add prelimiary test for validating build-time artifacts - Try to fix CI on gnu windows Which apparently generates paths similar to linux, but with .exe suffix. The current linux patterns should match that. 
- refactor Help sharing code across modules allow rustc to use artifact dep environment variables, but… …it needs some adjustments to actually setup the unit dependency graph with artifacts as well. Right now it will only setup dependencies for artifacts that are libs, but not the artifacts themselves, completely ignoring them when they are not libs. Make artifact dependencies available in main loop This is the commit message #2: ------------------------------ rough cut of support for artifact dependencies at build time… …which unfortunately already shows that the binary it is supposed to include is reproducibly not ready in time even though the path is correct and it's present right after the run. Could it be related to rmeta? This is the commit message #3: ------------------------------ Fix test expectations as failure is typical than the warning we had before… …and add some tolerance to existing test to avoid occasional failures. This doesn't change the issue that it also doens't work at all for libraries, which is nicely reproducable and hopefully helps to fix this issue. This is the commit message #4: ------------------------------ Probably the fix for the dependency issue in the scheduler This means that bin() targets are now properly added to the job graph to cause proper syncing, whereas previously apparently it would still schedule binaries, but somehow consider them rmeta and thus start their dependents too early, leading to races. This is the commit message #5: ------------------------------ Don't accidentally include non-gnu windows tests in gnu windows. Support cargo doc and cargo check The major changes here are… - always compile artifacts in build mode, as we literally want the build output, always, which the dependent might rely on being present. - share code between the rather similar looking paths for rustdoc and rustc. 
Make artifact messages appear more in line with cargo by using backticks Also: Add first test for static lib support in build scripts build-scripts with support for cdylib and staticlib - Fix windows msvc build No need to speculate why the staticlib has hashes in the name even though nothing else. staticlib and cdylib support for libraries test staticlib and cdylibs for rustdoc as well. Also catch a seemingly untested special case/warning about the lack of linkable items, which probably shouldn't be an issue for artifacts as they are not linkable in the traditional sense. more useful test for 'cargo check' `cargo check` isn't used very consistently in tests, so when we use it we should be sure to actually try to use an artifact based feature to gain some coverage. verify that multiple versions are allowed for artifact deps as well. also: remove redundant test This is the commit message #2: ------------------------------ Properly choose which dependencies take part in artifact handling Previously it would include them very generously without considering the compatible dependency types. This is the commit message #3: ------------------------------ a more complex test which includes dev-dependencies It also shows that doc-tests don't yet work as rustdoc is run outside of the system into which we integrate right now. It should be possible to write our environment variable configuration in terms of this 'finished compilation' though, hopefully with most code reused. This is the commit message #4: ------------------------------ A first stab at storing artifact environment variables for packages… …however, it seems like the key for this isn't necessarily correct under all circumstances. Maybe it should be something more specific, don't know. This is the commit message #5: ------------------------------ Adjust key for identifying units to Metadata This one is actually unique and feels much better. 
This is the commit message #6: ------------------------------ Attempt to make use of artifact environment information… …but fail as the metadata won't match as the doctest unit is, of course, its separate unit. Now I wonder if its possible to find the artifact units in question that have the metadata. Properly use metadata to use artifact environment variables in doctests This is the commit message #2: ------------------------------ Add test for resolver = "2" and build dependencies Interestingly the 'host-features' flag must be set (as is seemingly documented in the flags documentation as well), even though I am not quite sure if this is the 100% correct solution. Should it rather have an entry with this flag being false in its map? Probably not… but I am not quite certain. This is the commit message #3: ------------------------------ set most if not all tests to use resolver = "2" This allows to keep it working with the most recent version while allowing to quickly test with "1" as well (which thus far was working fine). All tests I could imagine (excluding target and profiles) are working now Crossplatform tests now run on architecture aarm64 as well. More stringent negative testing Fix incorrect handling of dependency directory computation Previously it would just 'hack' the deps-dir to become something very different for artifacts. This could easily be fixed by putting the logic for artifact output directories into the right spot. A test for cargo-tree to indicate artifacts aren't handled specifically Assure build-scripts can't access artifacts at build time Actual doc-tests with access to artifact env vars All relevant parsing of `target = [..]` Next step is to actually take it into consideration. 
A failing test for adjusting the target for build script artifacts using --target Check for unknown artifact target triple in a place that exists for a year The first test showing that `target="target"` deps seemingly work For now only tested for build scripts, but it won't be much different for non-build dependencies. build scripts accept custom targets unconditionally Support target setting for non-build dependencies This is the commit message #2: ------------------------------ Add doc-test cross compile related test Even though there is no artifact code specific to doc testing, it's worth to try testing it with different target settings to validate it still works despite doc tests having some special caseing around target settings. This is the commit message #3: ------------------------------ A test to validate profiles work as expected for build-deps and non-build deps No change is required to make this work and artifact dependencies 'just work' based on the typical rules of their non-artifact counterarts. This is the commit message #4: ------------------------------ Adjust `cargo metadata` to deal with artifact dependencies This commit was squashed and there is probably more that changed. This is the commit message #5: ------------------------------ Show bin-only artifacts in "resolve" of metadata as well. This is the commit message #6: ------------------------------ minor refactoring during research for RFC-3176 This will soon need to return multiple extern-name/dep-name pairs. This is the commit message #7: ------------------------------ See if opt-level 3 works on win-msvc in basic profile test for artifacts This is the same value as is used in the other test of the same name, which certainly runs on windows. This is the commit message #8: ------------------------------ refactor Assure the type for targets reflect that they cannot be the host target, which removes a few unreachable!() expressions. 
Put `root_unit_compile_kind` into `UnitFor` Previously that wasn't done because of the unused `all_values()` method which has now been deleted as its not being used anyomre. This allows for the root unit compile kind to be passed as originally intended, instead of working around the previous lack of extendability of UnitFor due to ::all_values(). This is also the basis for better/correct feature handling once feature resolution can be depending on the artifact target as well, resulting in another extension to UnitFor for that matter. Also ---- - Fix ordering Previously the re-created target_mode was used due to the reordering in code, and who knows what kind of effects that might have (despite the test suite being OK with it). Let's put it back in place. - Deactivate test with filename collision on MSVC until RFC-3176 lands Avoid clashes with binaries called 'artifact' by putting 'artifact/' into './deps/' This commit addresses review comment https://github.com/rust-lang/cargo/pull/9992#discussion_r772939834 Don't rely on operator precedence for boolean operations Now it should be clear that no matter what the first term is, if the unit is an artifact, we should enqueue it. Replace boolean and `/*artifact*/ <bool>` with `IsArtifact::(Yes/No)` fix `doc::doc_lib_false()` test It broke due to major breakage in the way dependencies are calculated. Now we differentiate between deps computation for docs and for building. Avoid testing for doctest cross-compilation message It seems to be present on my machine, but isn't on linux and it's probably better to leave it out entirely and focus on the portions of consecutive output that we want to see at least. A test to validate features are unified across libraries and those in artifact deps in the same target Allow aarch64 MacOS to crosscompile to an easily executable alternative target That way more tests can run locally. 
Support for feature resolution per target The implementation is taken directly from RFC-3176 and notably lacks the 'multidep' part. Doing this definitely has the benefit of making entirely clear 'what is what' and helps to greatly reduce the scope of RFC-3176 when it's rebuilt based on the latest RF-3028, what we are implementing right now. Also ---- - A test which prooves that artifact deps with different target don't have a feature namespace yet - Add a test to validate features are namespaced by target Previously it didn't work because it relies on resolver = "2". - 'cargo metadata' test to see how artifact-deps are presented - Missed an opportunity for using the newly introduced `PackageFeaturesKey` - Use a HashMap to store name->value relations for artifact environment variables This is semantically closer to what's intended. also: Remove a by now misleading comment Prevent resolver crash if `target = "target"` is encountered in non-build dependencies A warning was emitted before, now we also apply a fix. Previously the test didn't fail as it accidentally used the old resolver, which now has been removed. Abort in parsing stage if nightly flag is not set and 'artifact' is used There is no good reason to delay errors to a later stage when code tries to use artifacts via environment variables which are not present. Change wording of warning message into what's expected for an error message remove unnecessary `Result` in `collect()` call Improve logic to warn if dependencie are ignored due to missing libraries The improvement here is to trigger correctly if any dependency of a crate is potentially a library, without having an actual library target as part of the package specification. Due to artifact dependencies it's also possible to have a dependency to the same crate of the same version, hence the package name isn't necessarily a unique name anymore. Now the name of the actual dependency in the toml file is used to alleviate this. 
Various small changes for readability and consistency A failing test to validate artifacts work in published crates as well Originally this should have been a test to see target acquisition works but this more pressing issue surfaced instead. Make artifacts known to the registry data (backwards compatible) Now artifacts are serialized into the registry on publish (at least if this code is actually used in the real crates-io registry) which allows the resolve stage to contain artifact information. This seems to be in line with the idea to provide cargo with all information it needs to do package resolution without downloading the actual manifest. Pick up all artifact targets into target info once resolve data is available Even though this works in the test at hand, it clearly shows there is a cyclic dependency between the resolve and the target data. In theory, one would have to repeat resolution until it settles while avoiding cycles. Maybe there is a better way. Add `bindeps`/artifact dependencies to `unstsable.md` with examples Fix tests Various small improvements Greatly simplify artifact environment propagation to commands Remove all adjustments to cargo-metadata, but leave tests The tests are to record the status quo with the current code when artifact dependencies are present and assure the information is not entirely non-sensical. Revert "Make artifacts known to the registry data (backwards compatible)" This reverts commit adc5f8ad04840af9fd06c964cfcdffb8c30769b0. Ideally we are able to make it work without altering the registry storage format. This could work if information from the package set is added to the resolve information. Enrich resolves information with additional information from downloaded manifests Resolve information comes from the registry, and it's only as rich as needed to know which packages take part in the build. Artifacts, however, don't influence dependency resolution, hence it shouldn't be part of it. 
For artifact information being present nonetheless when it matters, we port it back to the resolve graph where it will be needed later. Collect 'forced-target' information from non-workspace members as well This is needed as these targets aren't present in the registry and thus can't be picked up by traversing non-workspce members. The mechanism used to pick up artifact targets can also be used to pick up these targets. Remove unnecessary adjustment of doc test refactor `State::deps()` to have filter; re-enable accidentally disabled test The initial rebasing started out with a separted `deps_filtered()` method to retain the original capabilities while minimizing the chance for surprises. It turned out that the all changes combined in this PR make heavy use of filtering capabilities to the point where `deps(<without filter>)` was unused. This suggested that it's required to keep it as is without a way to inline portions of it. For the original change that triggered this rebase, see bd45ac81ba062a7daa3b0178dfcb6fd5759a943c The fix originally made was reapplied by allowing to re-use the required filter, but without inlining it. Always error on invalid artifact setup, with or without enabled bindeps feature Clarify how critical resolver code around artifact is working Remove workaround in favor of deferring a proper implementation See https://github.com/rust-lang/cargo/pull/9992#issuecomment-1033394197 for reference and the TODO in the ignored test for more information. truncate comments at 80-90c; cleanup - remove unused method - remove '-Z unstable-options' - improve error message - improve the way MSVC special cases are targetted in tests - improve how executables are found on non MSVC Avoid depending on output of rustc There is cyclic dependency between rustc and cargo which makes it impossible to adjust cargo's expectations on rustc without leaving broken commits in rustc and cargo. 
Add missing documentation fix incorrect removal of non-artifact libs This is also the first step towards cleaning up the filtering logic which is still making some logic harder to understand than needs be. The goal is to get it to be closer to what's currently on master. Another test was added to have more safety regarding the overall library inclusion logic. inline `build_artifact_requirements_to_units()` Simplify filtering This adds a default filter to `state.deps(…)` making it similar to what's currently in master, while creating another version of it to allow setting a custom filter. This is needed as the default filter won't allow build dependencies, which we need in this particular case. `calc_artifact_deps(…)` now hard-codes the default filter which is needed due to the use of `any` here: https://github.com/rust-lang/cargo/blob/c0e6abe384c2c6282bdd631e2f2a3b092043e6c6/src/cargo/core/compiler/unit_dependencies.rs#L1119 . Simplify filtering.
2021-10-21 09:57:23 +00:00
/// Change the artifact to be of the given kind, like "bin", or "staticlib",
/// along with a specific target triple if provided.
pub fn artifact(&mut self, kind: &str, target: Option<String>) -> &mut Self {
self.artifact = Some((kind.to_string(), target));
self
}
2019-02-03 04:01:23 +00:00
/// Adds `registry = $registry` to this dependency.
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
pub fn registry(&mut self, registry: &str) -> &mut Self {
self.registry = Some(registry.to_string());
self
}
2019-02-03 04:01:23 +00:00
/// Adds `features = [ ... ]` to this dependency.
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
pub fn enable_features(&mut self, features: &[&str]) -> &mut Self {
self.features.extend(features.iter().map(|s| s.to_string()));
self
}
2019-02-03 04:01:23 +00:00
/// Adds `package = ...` to this dependency.
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
pub fn package(&mut self, pkg: &str) -> &mut Self {
self.package = Some(pkg.to_string());
self
}
2019-02-03 04:01:23 +00:00
/// Changes this to an optional dependency.
Fix publishing renamed dependencies to crates.io This commit fixes publishing crates which contain locally renamed dependencies to crates.io. Previously this lack of information meant that although we could resolve the crate graph correctly it wouldn't work well with respect to optional features and optional dependencies. The fix here is to persist this information into the registry about the crate being renamed in `Cargo.toml`, allowing Cargo to correctly deduce feature names as it does when it has `Cargo.toml` locally. A dual side of this commit is to publish this information to crates.io. We'll want to merge the associated PR (link to come soon) on crates.io first and make sure that's deployed as well before we stabilize the crate renaming feature. The index format is updated as well as part of this change. The `name` key for dependencies is now unconditionally what was written in `Cargo.toml` as the left-hand-side of the dependency specification. In other words this is the raw crate name, but only for the local crate. A new key, `package`, is added to dependencies (and it can be `None`). This key indicates the crates.io package is being linked against, an represents the `package` key in `Cargo.toml`. It's important to consider the interaction with older Cargo implementations which don't support the `package` key in the index. In these situations older Cargo binaries will likely fail to resolve entirely as the renamed name is unlikely to exist on crates.io. For example the `futures` crate now has an optional dependency with the name `futures01` which depends on an older version of `futures` on crates.io. The string `futures01` will be listed in the index under the `"name"` key, but no `futures01` crate exists on crates.io so older Cargo will generate an error. If the crate does exist on crates.io, then even weirder error messages will likely result. Closes #5962
2018-09-07 16:37:06 +00:00
pub fn optional(&mut self, optional: bool) -> &mut Self {
self.optional = optional;
self
}
}