// cargo/crates/cargo-test-support/src/registry.rs


use crate::git::repo;
use crate::paths;
use cargo_util::{registry::make_dep_path, Sha256};
use flate2::write::GzEncoder;
use flate2::Compression;
use std::collections::BTreeMap;
use std::fmt::Write as _;
use std::fs::{self, File};
use std::io::{BufRead, BufReader, Write};
use std::net::{SocketAddr, TcpListener};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use tar::{Builder, Header};
use url::Url;
/// Gets the path to the local index pretending to be crates.io. This is a Git repo
/// initialized with a `config.json` file pointing to `dl_path` for downloads
/// and `api_path` for uploads.
pub fn registry_path() -> PathBuf {
generate_path("registry")
}
pub fn registry_url() -> Url {
generate_url("registry")
}
/// Gets the path for local web API uploads. Cargo will place the contents of a web API
/// request here. For example, `api/v1/crates/new` is the result of publishing a crate.
pub fn api_path() -> PathBuf {
generate_path("api")
}
pub fn api_url() -> Url {
generate_url("api")
}
/// Gets the path where crates can be downloaded using the web API endpoint. Crates
/// should be organized as `{name}/{version}/download` to match the web API
/// endpoint. This is rarely used and must be manually set up.
pub fn dl_path() -> PathBuf {
generate_path("dl")
}
pub fn dl_url() -> Url {
generate_url("dl")
}
/// Gets the alternative-registry version of `registry_path`.
pub fn alt_registry_path() -> PathBuf {
generate_path("alternative-registry")
}
pub fn alt_registry_url() -> Url {
generate_url("alternative-registry")
}
/// Gets the alternative-registry version of `dl_path`.
pub fn alt_dl_path() -> PathBuf {
generate_path("alt_dl")
}
pub fn alt_dl_url() -> String {
generate_alt_dl_url("alt_dl")
}
/// Gets the alternative-registry version of `api_path`.
pub fn alt_api_path() -> PathBuf {
generate_path("alt_api")
}
pub fn alt_api_url() -> Url {
generate_url("alt_api")
}
pub fn generate_path(name: &str) -> PathBuf {
paths::root().join(name)
}
pub fn generate_url(name: &str) -> Url {
Url::from_file_path(generate_path(name)).ok().unwrap()
}
pub fn generate_alt_dl_url(name: &str) -> String {
let base = Url::from_file_path(generate_path(name)).ok().unwrap();
format!("{}/{{crate}}/{{version}}/{{crate}}-{{version}}.crate", base)
}
/// A builder for initializing registries.
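///
/// # Example
///
/// A minimal sketch (illustrative only) of setting up both the crates.io
/// replacement and the "alternative" registry for a test. It must run
/// before the test writes its own `.cargo/config`, since `build` panics
/// if that file already exists:
///
/// ```
/// RegistryBuilder::new()
///     .alternative(true)
///     .build();
/// ```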
pub struct RegistryBuilder {
/// If `true`, sets up source replacement so that crates.io is replaced
/// with a registry on the filesystem.
replace_crates_io: bool,
/// If `true`, configures a registry named "alternative".
alternative: bool,
/// If set, sets the API url for the "alternative" registry.
/// This defaults to a directory on the filesystem.
alt_api_url: Option<String>,
/// If `true`, configures `.cargo/credentials` with some tokens.
add_tokens: bool,
}
impl RegistryBuilder {
pub fn new() -> RegistryBuilder {
RegistryBuilder {
replace_crates_io: true,
alternative: false,
alt_api_url: None,
add_tokens: true,
}
}
/// Sets whether or not to replace crates.io with a registry on the filesystem.
/// Default is `true`.
pub fn replace_crates_io(&mut self, replace: bool) -> &mut Self {
self.replace_crates_io = replace;
self
}
/// Sets whether or not to initialize an alternative registry named "alternative".
/// Default is `false`.
pub fn alternative(&mut self, alt: bool) -> &mut Self {
self.alternative = alt;
self
}
/// Sets the API url for the "alternative" registry.
/// Defaults to a path on the filesystem ([`alt_api_path`]).
pub fn alternative_api_url(&mut self, url: &str) -> &mut Self {
self.alternative = true;
self.alt_api_url = Some(url.to_string());
self
}
/// Sets whether or not to initialize `.cargo/credentials` with some tokens.
/// Defaults to `true`.
pub fn add_tokens(&mut self, add: bool) -> &mut Self {
self.add_tokens = add;
self
}
/// Initializes the registries.
pub fn build(&self) {
let config_path = paths::home().join(".cargo/config");
if config_path.exists() {
panic!(
"{} already exists, the registry may only be initialized once, \
and must be done before the config file is created",
config_path.display()
);
}
t!(fs::create_dir_all(config_path.parent().unwrap()));
let mut config = String::new();
if self.replace_crates_io {
write!(
&mut config,
"
[source.crates-io]
replace-with = 'dummy-registry'
[source.dummy-registry]
registry = '{}'
",
registry_url()
)
.unwrap();
}
if self.alternative {
write!(
config,
"
[registries.alternative]
index = '{}'
",
alt_registry_url()
)
.unwrap();
}
t!(fs::write(&config_path, config));
if self.add_tokens {
let credentials = paths::home().join(".cargo/credentials");
t!(fs::write(
&credentials,
r#"
[registry]
token = "api-token"
[registries.alternative]
token = "api-token"
"#
));
}
if self.replace_crates_io {
init_registry(registry_path(), dl_url().into(), api_url(), api_path());
}
if self.alternative {
init_registry(
alt_registry_path(),
alt_dl_url(),
self.alt_api_url
.as_ref()
.map_or_else(alt_api_url, |url| Url::parse(url).expect("valid url")),
alt_api_path(),
);
}
}
/// Initializes the registries, and sets up an HTTP server for the
/// "alternative" registry.
///
/// The given callback takes a `Vec` of headers when a request comes in.
/// The first entry should be the HTTP command, such as
/// `PUT /api/v1/crates/new HTTP/1.1`.
///
/// The callback should return the HTTP code for the response, and the
/// response body.
///
/// This method returns a `JoinHandle` which you should call
/// `.join().unwrap()` on before exiting the test.
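///
/// # Example
///
/// A hedged sketch of a handler that accepts every request; the status
/// code and body here are illustrative, not what crates.io would return:
///
/// ```
/// let handle = RegistryBuilder::new().build_api_server(&|_headers| {
///     // Respond to the request with an empty JSON body.
///     (200, &r#"{}"#)
/// });
/// // ... run the cargo command under test here ...
/// handle.join().unwrap();
/// ```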
pub fn build_api_server<'a>(
&mut self,
handler: &'static (dyn (Fn(Vec<String>) -> (u32, &'a dyn AsRef<[u8]>)) + Sync),
) -> thread::JoinHandle<()> {
let server = TcpListener::bind("127.0.0.1:0").unwrap();
let addr = server.local_addr().unwrap();
let api_url = format!("http://{}", addr);
self.replace_crates_io(false)
.alternative_api_url(&api_url)
.build();
let t = thread::spawn(move || {
let mut conn = BufReader::new(server.accept().unwrap().0);
let headers: Vec<_> = (&mut conn)
.lines()
.map(|s| s.unwrap())
.take_while(|s| s.len() > 2)
.map(|s| s.trim().to_string())
.collect();
let (code, response) = handler(headers);
let response = response.as_ref();
let stream = conn.get_mut();
write!(
stream,
"HTTP/1.1 {}\r\n\
Content-Length: {}\r\n\
\r\n",
code,
response.len()
)
.unwrap();
stream.write_all(response).unwrap();
});
t
}
}
/// A builder for creating a new package in a registry.
///
/// This uses "source replacement" using an automatically generated
/// `.cargo/config` file to ensure that dependencies will use these packages
/// instead of contacting crates.io. See `source-replacement.md` for more
/// details on how source replacement works.
///
/// Call `publish` to finalize and create the package.
///
/// If no files are specified, an empty `lib.rs` file is automatically created.
///
/// The `Cargo.toml` file is automatically generated based on the methods
/// called on `Package` (for example, calling `dep()` will add to the
/// `[dependencies]` automatically). You may also specify a `Cargo.toml` file
/// to override the generated one.
///
/// This supports different registry types:
/// - Regular source replacement that replaces `crates.io` (the default).
/// - A "local registry" which is a subset for vendoring (see
/// `Package::local`).
/// - An "alternative registry" which requires specifying the registry name
/// (see `Package::alternative`).
///
/// This does not support "directory sources". See `directory.rs` for
/// `VendorPackage` which implements directory sources.
///
/// # Example
/// ```
/// // Publish package "a" depending on "b".
/// Package::new("a", "1.0.0")
/// .dep("b", "1.0.0")
/// .file("src/lib.rs", r#"
/// extern crate b;
/// pub fn f() -> i32 { b::f() * 2 }
/// "#)
/// .publish();
///
/// // Publish package "b".
/// Package::new("b", "1.0.0")
/// .file("src/lib.rs", r#"
/// pub fn f() -> i32 { 12 }
/// "#)
/// .publish();
///
/// // Create a project that uses package "a".
/// let p = project()
/// .file("Cargo.toml", r#"
/// [package]
/// name = "foo"
/// version = "0.0.1"
///
/// [dependencies]
/// a = "1.0"
/// "#)
/// .file("src/main.rs", r#"
/// extern crate a;
/// fn main() { println!("{}", a::f()); }
/// "#)
/// .build();
///
/// p.cargo("run").with_stdout("24").run();
/// ```
#[must_use]
pub struct Package {
name: String,
vers: String,
deps: Vec<Dependency>,
files: Vec<PackageFile>,
yanked: bool,
features: FeatureMap,
local: bool,
alternative: bool,
invalid_json: bool,
proc_macro: bool,
links: Option<String>,
rust_version: Option<String>,
cargo_features: Vec<String>,
v: Option<u32>,
}
type FeatureMap = BTreeMap<String, Vec<String>>;
#[derive(Clone)]
pub struct Dependency {
name: String,
vers: String,
kind: String,
artifact: Option<(String, Option<String>)>,
target: Option<String>,
features: Vec<String>,
registry: Option<String>,
package: Option<String>,
optional: bool,
}
/// A file to be created in a package.
struct PackageFile {
path: String,
contents: String,
/// The Unix mode for the file. Note that when extracted on Windows, this
/// is mostly ignored since it doesn't have the same style of permissions.
mode: u32,
/// If `true`, the file is created in the root of the tarfile, used for
/// testing invalid packages.
extra: bool,
}
const DEFAULT_MODE: u32 = 0o644;
/// Initializes the on-disk registry and sets up the config so that crates.io
/// is replaced with the one on disk.
pub fn init() {
let config = paths::home().join(".cargo/config");
if config.exists() {
return;
}
RegistryBuilder::new().build();
}
/// Variant of `init` that initializes the "alternative" registry.
pub fn alt_init() {
RegistryBuilder::new().alternative(true).build();
}
pub struct RegistryServer {
done: Arc<AtomicBool>,
server: Option<thread::JoinHandle<()>>,
addr: SocketAddr,
}
impl RegistryServer {
pub fn addr(&self) -> SocketAddr {
self.addr
}
}
impl Drop for RegistryServer {
fn drop(&mut self) {
self.done.store(true, Ordering::SeqCst);
// NOTE: we can't actually await the server since it's blocked in accept()
let _ = self.server.take();
}
}
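/// Serves `registry_path` over HTTP on a background thread, answering
/// `GET` requests with the file contents (plus `ETag` and `Last-Modified`
/// headers) or a `404 Not Found`.
///
/// A hedged usage sketch; serving the standard test registry is only one
/// possible use:
///
/// ```
/// let server = serve_registry(registry_path());
/// let index_url = format!("http://{}/", server.addr());
/// // Dropping `server` signals the serving thread to stop.
/// ```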
#[must_use]
pub fn serve_registry(registry_path: PathBuf) -> RegistryServer {
let listener = TcpListener::bind("127.0.0.1:0").unwrap();
let addr = listener.local_addr().unwrap();
let done = Arc::new(AtomicBool::new(false));
let done2 = done.clone();
let t = thread::spawn(move || {
let mut line = String::new();
'server: while !done2.load(Ordering::SeqCst) {
let (socket, _) = listener.accept().unwrap();
// Let's implement a very naive static file HTTP server.
let mut buf = BufReader::new(socket);
// First, the request line:
// GET /path HTTPVERSION
line.clear();
if buf.read_line(&mut line).unwrap() == 0 {
// Connection terminated.
continue;
}
assert!(line.starts_with("GET "), "got non-GET request: {}", line);
let path = PathBuf::from(
line.split_whitespace()
.skip(1)
.next()
.unwrap()
.trim_start_matches('/'),
);
let file = registry_path.join(path);
if file.exists() {
// Grab some other headers we may care about.
let mut if_modified_since = None;
let mut if_none_match = None;
loop {
line.clear();
if buf.read_line(&mut line).unwrap() == 0 {
continue 'server;
}
if line == "\r\n" {
// End of headers.
line.clear();
break;
}
let value = line
.splitn(2, ':')
.skip(1)
.next()
.map(|v| v.trim())
.unwrap();
if line.starts_with("If-Modified-Since:") {
if_modified_since = Some(value.to_owned());
} else if line.starts_with("If-None-Match:") {
if_none_match = Some(value.trim_matches('"').to_owned());
}
}
// Now grab info about the file.
let data = fs::read(&file).unwrap();
let etag = Sha256::new().update(&data).finish_hex();
let last_modified = format!("{:?}", file.metadata().unwrap().modified().unwrap());
// Start to construct our response:
let mut any_match = false;
let mut all_match = true;
if let Some(expected) = if_none_match {
if etag != expected {
all_match = false;
} else {
any_match = true;
}
}
if let Some(expected) = if_modified_since {
// NOTE: Equality comparison is good enough for tests.
if last_modified != expected {
all_match = false;
} else {
any_match = true;
}
}
// Write out the main response line.
if any_match && all_match {
buf.get_mut()
.write_all(b"HTTP/1.1 304 Not Modified\r\n")
.unwrap();
} else {
buf.get_mut().write_all(b"HTTP/1.1 200 OK\r\n").unwrap();
}
// TODO: Support 451 for crate index deletions.
// Write out other headers.
buf.get_mut()
.write_all(format!("Content-Length: {}\r\n", data.len()).as_bytes())
.unwrap();
buf.get_mut()
.write_all(format!("ETag: \"{}\"\r\n", etag).as_bytes())
.unwrap();
buf.get_mut()
.write_all(format!("Last-Modified: {}\r\n", last_modified).as_bytes())
.unwrap();
// And finally, write out the body.
buf.get_mut().write_all(b"\r\n").unwrap();
buf.get_mut().write_all(&data).unwrap();
} else {
loop {
line.clear();
if buf.read_line(&mut line).unwrap() == 0 {
// Connection terminated.
continue 'server;
}
if line == "\r\n" {
break;
}
}
buf.get_mut()
.write_all(b"HTTP/1.1 404 Not Found\r\n\r\n")
.unwrap();
buf.get_mut().write_all(b"\r\n").unwrap();
}
buf.get_mut().flush().unwrap();
}
});
RegistryServer {
addr,
server: Some(t),
done,
}
}
/// Creates a new on-disk registry.
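///
/// This writes a `config.json` with the given download and API URLs into a
/// fresh git repository at `registry_path`, and creates the
/// `api/v1/crates` directory under `api_path`.
///
/// A minimal sketch mirroring what `RegistryBuilder::build` does for the
/// crates.io replacement:
///
/// ```
/// init_registry(registry_path(), dl_url().into(), api_url(), api_path());
/// ```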
pub fn init_registry(registry_path: PathBuf, dl_url: String, api_url: Url, api_path: PathBuf) {
// Initialize a new registry.
repo(&registry_path)
.file(
"config.json",
&format!(r#"{{"dl":"{}","api":"{}"}}"#, dl_url, api_url),
)
.build();
fs::create_dir_all(api_path.join("api/v1/crates")).unwrap();
}
impl Package {
/// Creates a new package builder.
/// Call `publish()` to finalize and build the package.
pub fn new(name: &str, vers: &str) -> Package {
init();
Package {
name: name.to_string(),
vers: vers.to_string(),
deps: Vec::new(),
files: Vec::new(),
yanked: false,
features: BTreeMap::new(),
local: false,
alternative: false,
invalid_json: false,
proc_macro: false,
links: None,
rust_version: None,
cargo_features: Vec::new(),
v: None,
}
}
/// Call with `true` to publish in a "local registry".
///
/// See `source-replacement.html#local-registry-sources` for more details
/// on local registries. See `local_registry.rs` for the tests that use
/// this.
pub fn local(&mut self, local: bool) -> &mut Package {
self.local = local;
self
}
/// Call with `true` to publish in an "alternative registry".
///
/// The name of the alternative registry is called "alternative".
///
/// See `src/doc/src/reference/registries.md` for more details on
/// alternative registries. See `alt_registry.rs` for the tests that use
/// this.
pub fn alternative(&mut self, alternative: bool) -> &mut Package {
self.alternative = alternative;
self
}
/// Adds a file to the package.
pub fn file(&mut self, name: &str, contents: &str) -> &mut Package {
self.file_with_mode(name, DEFAULT_MODE, contents)
}
/// Adds a file with a specific Unix mode.
pub fn file_with_mode(&mut self, path: &str, mode: u32, contents: &str) -> &mut Package {
self.files.push(PackageFile {
path: path.to_string(),
contents: contents.to_string(),
mode,
extra: false,
});
self
}
/// Adds an "extra" file that is not rooted within the package.
///
/// Normal files are automatically placed within a directory named
/// `$PACKAGE-$VERSION`. This allows you to override that behavior,
/// typically for testing invalid behavior.
pub fn extra_file(&mut self, path: &str, contents: &str) -> &mut Package {
self.files.push(PackageFile {
path: path.to_string(),
contents: contents.to_string(),
mode: DEFAULT_MODE,
extra: true,
});
self
}
/// Adds a normal dependency. Example:
/// ```
/// [dependencies]
/// foo = {version = "1.0"}
/// ```
pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package {
self.add_dep(&Dependency::new(name, vers))
}
/// Adds a dependency with the given feature. Example:
/// ```
/// [dependencies]
/// foo = {version = "1.0", "features": ["feat1", "feat2"]}
/// ```
pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package {
self.add_dep(Dependency::new(name, vers).enable_features(features))
}
/// Adds a platform-specific dependency. Example:
/// ```
/// [target.'cfg(windows)'.dependencies]
/// foo = {version = "1.0"}
/// ```
pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package {
self.add_dep(Dependency::new(name, vers).target(target))
}
/// Adds a dependency to the alternative registry.
pub fn registry_dep(&mut self, name: &str, vers: &str) -> &mut Package {
self.add_dep(Dependency::new(name, vers).registry("alternative"))
}
/// Adds a dev-dependency. Example:
/// ```
/// [dev-dependencies]
/// foo = {version = "1.0"}
/// ```
pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package {
self.add_dep(Dependency::new(name, vers).dev())
}
/// Adds a build-dependency. Example:
/// ```
/// [build-dependencies]
/// foo = {version = "1.0"}
/// ```
pub fn build_dep(&mut self, name: &str, vers: &str) -> &mut Package {
self.add_dep(Dependency::new(name, vers).build())
}
pub fn add_dep(&mut self, dep: &Dependency) -> &mut Package {
self.deps.push(dep.clone());
self
}
/// Specifies whether or not the package is "yanked".
pub fn yanked(&mut self, yanked: bool) -> &mut Package {
self.yanked = yanked;
self
}
/// Specifies whether or not this is a proc macro.
pub fn proc_macro(&mut self, proc_macro: bool) -> &mut Package {
self.proc_macro = proc_macro;
self
}
/// Adds an entry in the `[features]` section.
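///
/// For example, `.feature("extras", &["rand"])` ends up in this package's
/// index entry roughly as (names illustrative):
///
/// ```text
/// "features": { "extras": ["rand"] }
/// ```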
pub fn feature(&mut self, name: &str, deps: &[&str]) -> &mut Package {
let deps = deps.iter().map(|s| s.to_string()).collect();
self.features.insert(name.to_string(), deps);
self
}
/// Specifies the minimum Rust version (the manifest's `rust-version` field).
pub fn rust_version(&mut self, rust_version: &str) -> &mut Package {
self.rust_version = Some(rust_version.into());
self
}
/// Causes the JSON line emitted in the index to be invalid, presumably
/// causing Cargo to skip over this version.
pub fn invalid_json(&mut self, invalid: bool) -> &mut Package {
self.invalid_json = invalid;
self
}
pub fn links(&mut self, links: &str) -> &mut Package {
self.links = Some(links.to_string());
self
}
pub fn cargo_feature(&mut self, feature: &str) -> &mut Package {
self.cargo_features.push(feature.to_owned());
self
}
/// Sets the index schema version for this package.
///
/// See `cargo::sources::registry::RegistryPackage` for more information.
pub fn schema_version(&mut self, version: u32) -> &mut Package {
self.v = Some(version);
self
}
/// Creates the package and places it in the registry.
///
/// This does not actually use Cargo's publishing system, but instead
/// manually creates the entry in the registry on the filesystem.
///
/// Returns the checksum for the package.
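///
/// A minimal usage sketch (illustrative; the builder methods are the ones
/// defined on `Package` in this module, not a chain taken from any
/// particular test):
///
/// ```no_run
/// use cargo_test_support::registry::Package;
///
/// // Publish `bar 1.0.0` with one (empty) feature into the test registry.
/// Package::new("bar", "1.0.0")
///     .feature("extras", &[])
///     .publish();
/// ```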
pub fn publish(&self) -> String {
self.make_archive();
// Figure out what we're going to write into the index.
let deps = self
.deps
.iter()
.map(|dep| {
// In the index, the `registry` is null if it is from the same registry.
// In Cargo.toml, it is None if it is from crates.io.
let registry_url = match (self.alternative, dep.registry.as_deref()) {
(false, None) => None,
(false, Some("alternative")) => Some(alt_registry_url().to_string()),
(true, None) => {
Some("https://github.com/rust-lang/crates.io-index".to_string())
}
(true, Some("alternative")) => None,
_ => panic!("registry_dep currently only supports `alternative`"),
};
serde_json::json!({
"name": dep.name,
"req": dep.vers,
"features": dep.features,
"default_features": true,
"target": dep.target,
"artifact": dep.artifact,
"optional": dep.optional,
"kind": dep.kind,
"registry": registry_url,
"package": dep.package,
})
})
.collect::<Vec<_>>();
let cksum = {
let c = t!(fs::read(&self.archive_dst()));
cksum(&c)
};
let name = if self.invalid_json {
serde_json::json!(1)
} else {
serde_json::json!(self.name)
};
// This emulates what crates.io may do in the future.
let (features, features2) = split_index_features(self.features.clone());
let mut json = serde_json::json!({
"name": name,
"vers": self.vers,
"deps": deps,
"cksum": cksum,
"features": features,
"yanked": self.yanked,
"links": self.links,
});
if let Some(f2) = &features2 {
json["features2"] = serde_json::json!(f2);
json["v"] = serde_json::json!(2);
}
if let Some(v) = self.v {
json["v"] = serde_json::json!(v);
}
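// Each published version becomes a single JSON line in its index file,
// e.g. (field order and values illustrative):
// {"name":"bar","vers":"1.0.0","deps":[],"cksum":"...","features":{},"yanked":false,"links":null}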
let line = json.to_string();
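// `make_dep_path` mirrors the crates.io index layout: for example, a crate
// named "bar" is stored at "3/b/bar" and "serde" at "se/rd/serde"
// (illustrative names).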
let file = make_dep_path(&self.name, false);
let registry_path = if self.alternative {
alt_registry_path()
} else {
registry_path()
};
// Write file/line in the index.
let dst = if self.local {
registry_path.join("index").join(&file)
} else {
registry_path.join(&file)
};
let prev = fs::read_to_string(&dst).unwrap_or_default();
t!(fs::create_dir_all(dst.parent().unwrap()));
t!(fs::write(&dst, prev + &line[..] + "\n"));
// Add the new file to the index.
if !self.local {
let repo = t!(git2::Repository::open(&registry_path));
let mut index = t!(repo.index());
t!(index.add_path(Path::new(&file)));
t!(index.write());
let id = t!(index.write_tree());
// Commit this change.
let tree = t!(repo.find_tree(id));
let sig = t!(repo.signature());
let parent = t!(repo.refname_to_id("refs/heads/master"));
let parent = t!(repo.find_commit(parent));
t!(repo.commit(
Some("HEAD"),
&sig,
&sig,
"Another commit",
&tree,
&[&parent]
));
}
cksum
}
fn make_archive(&self) {
let dst = self.archive_dst();
t!(fs::create_dir_all(dst.parent().unwrap()));
let f = t!(File::create(&dst));
let mut a = Builder::new(GzEncoder::new(f, Compression::default()));
if !self
.files
.iter()
.any(|PackageFile { path, .. }| path == "Cargo.toml")
{
self.append_manifest(&mut a);
}
if self.files.is_empty() {
self.append(&mut a, "src/lib.rs", DEFAULT_MODE, "");
} else {
for PackageFile {
path,
contents,
mode,
extra,
} in &self.files
{
if *extra {
self.append_raw(&mut a, path, *mode, contents);
} else {
self.append(&mut a, path, *mode, contents);
}
}
}
}
fn append_manifest<W: Write>(&self, ar: &mut Builder<W>) {
let mut manifest = String::new();
if !self.cargo_features.is_empty() {
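// Emits a line such as `cargo-features = ["edition2021"]` at the top of
// the generated manifest (the feature name here is illustrative).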
manifest.push_str(&format!(
"cargo-features = {}\n\n",
toml_edit::ser::to_item(&self.cargo_features).unwrap()
));
}
manifest.push_str(&format!(
r#"
[package]
name = "{}"
version = "{}"
authors = []
"#,
self.name, self.vers
));
if let Some(version) = &self.rust_version {
manifest.push_str(&format!("rust-version = \"{}\"", version));
}
for dep in self.deps.iter() {
let target = match dep.target {
None => String::new(),
Some(ref s) => format!("target.'{}'.", s),
};
let kind = match &dep.kind[..] {
"build" => "build-",
"dev" => "dev-",
2018-03-14 15:17:44 +00:00
_ => "",
};
manifest.push_str(&format!(
r#"
[{}{}dependencies.{}]
version = "{}"
"#,
target, kind, dep.name, dep.vers
));
add support for artifact dependencies (#9096) Tracking issue: https://github.com/rust-lang/cargo/issues/9096 Original PR: https://github.com/rust-lang/cargo/pull/9992 Add 'bindeps' -Z flag for later use A test to validate artifact dependencies aren't currently parsed. Parse 'artifact' and 'lib' fields. Note that this isn't behind a feature toggle so 'unused' messages will disappear. Transfer artifact dependencies from toml- into manifest-dependencies There are a few premises governing the operation. - if unstable features are not set, warn when 'artifact' or 'lib' is encountered. - bail if 'lib' is encountered alone, but warn that this WOULD happen with nightly. - artifact parsing checks for all invariants, but some aren't tested. Assure serialization of 'artifact' and 'lib' fields produces suitable values during publishing This should be the only place were these fields matter and where a cargo manifest is actually produced. These are only for internal use, no user is typically going to see or edit them. Place all artifact dependency tests inta their own module This facilitates deduplication later and possibly redistribution into other modules if there is a better fit. Represent artifacts that are rust libraries as another ArtifactKind This is more consistent and probably simpler for later use. No need to reflect the TOML data structure. Add tests to assure only 'lib = true' artifact deps are documented RFC-3028 doesn't talk about documentation, but for lib=true it's clear what the desired behaviour should be. If an artifact isn't a library though, then for now, it's transparent, maybe. Many more tests, more documentation, mild `Artifact` refactor The latter seems to be a better fit for what being an artifact really means within cargo, as it literally turns being a library on or off, and thus only optionally becoming a normal library. refactor to prepare for artifact related checks Don't show a no-lib warning for artifact dependencies (with lib = false) Tests for more artifact dependency invariants These are merely a proof of concept to show that we are not in a position to actually figure out everything about artifacts right after resolution. However, the error message looks more like a fatal error and less like something that can happen with a more elaborate error message with causes. This might show that these kind of checks might be better done later right before trying to use the information for create compile units. Validate that artifact deps with lib=true still trigger no-lib warnings This triggers the same warning as before, for now without any customization to indicate it's an artifact dependency. Use warnings instead of errors ------------------------------ This avoids the kind of harsh end of compilation in favor of something that can be recovered from. Since warnings are annoying, users will probably avoid re-declaring artifact dependencies. Hook in artifact dependencies into build script runs Even though we would still have to see what happens if they have a lib as well. Is it built twice? Also ---- - fly-by refactor: fix typo; use ? in method returning option - Propagate artifact information into Units; put artifacts into place This means artifacts now have their own place in the 'artifact' directory and uplifts won't happen for them. - refactor and fix cippy suggestion - fix build after rebasing onto master Create directories when executing the job, and not when preparing it. also: Get CI to work on windows the easy way, for now. 
Set directories for artifact dependencies in build script runtimes Test remaining kinds of build-script runtime environment variables Also ---- - Fix windows tests, the quick way. - Try to fix windows assertions, and generalize them - Fix second test for windows, hopefully test for available library dependency in build scripts with lib = true probably generally exclude all artifact dependencies with lib=false. Pass renamed dep names along with unit deps to allow proper artifact env names Test for selective bin:<name> syntax, as well as binaries with dashes Test to assure dependency names are transformed correctly assure advertised binaries and directories are actually present This wouldn't be the case if dependencies are not setup correctly, for instance. Also ---- - make it easier to see actual values even on failure This should help figure out why on CI something fails that works locally no matter what. Turns out this is a race condition, with my machine being on the good side of it so it doesn't show in testing. Fortunately it still can be reproduced and easily tested for. - refactor test; the race condition is still present though - Force CI to pass here by avoiding checks triggering race. - Fix windows build, maybe? More tolerant is_file() checks to account for delay on CI This _should_ help CI to test for the presence which is better than not testing at all. This appears to be needed as the output file isn't ready/present in time for some reason. The root cause of this issue is unknown, but it's definitely a race as it rarely happens locally. When it happened, the file was always present after the run. Now we will learn if it is truly not present, ever, or if it's maybe something very else. Validate libs also don't see artifact dependencies as libraries with lib=false Also ---- - Add prelimiary test for validating build-time artifacts - Try to fix CI on gnu windows Which apparently generates paths similar to linux, but with .exe suffix. The current linux patterns should match that. - refactor Help sharing code across modules allow rustc to use artifact dep environment variables, but… …it needs some adjustments to actually setup the unit dependency graph with artifacts as well. Right now it will only setup dependencies for artifacts that are libs, but not the artifacts themselves, completely ignoring them when they are not libs. Make artifact dependencies available in main loop This is the commit message #2: ------------------------------ rough cut of support for artifact dependencies at build time… …which unfortunately already shows that the binary it is supposed to include is reproducibly not ready in time even though the path is correct and it's present right after the run. Could it be related to rmeta? This is the commit message #3: ------------------------------ Fix test expectations as failure is typical than the warning we had before… …and add some tolerance to existing test to avoid occasional failures. This doesn't change the issue that it also doens't work at all for libraries, which is nicely reproducable and hopefully helps to fix this issue. This is the commit message #4: ------------------------------ Probably the fix for the dependency issue in the scheduler This means that bin() targets are now properly added to the job graph to cause proper syncing, whereas previously apparently it would still schedule binaries, but somehow consider them rmeta and thus start their dependents too early, leading to races. 
This is the commit message #5: ------------------------------ Don't accidentally include non-gnu windows tests in gnu windows. Support cargo doc and cargo check The major changes here are… - always compile artifacts in build mode, as we literally want the build output, always, which the dependent might rely on being present. - share code between the rather similar looking paths for rustdoc and rustc. Make artifact messages appear more in line with cargo by using backticks Also: Add first test for static lib support in build scripts build-scripts with support for cdylib and staticlib - Fix windows msvc build No need to speculate why the staticlib has hashes in the name even though nothing else. staticlib and cdylib support for libraries test staticlib and cdylibs for rustdoc as well. Also catch a seemingly untested special case/warning about the lack of linkable items, which probably shouldn't be an issue for artifacts as they are not linkable in the traditional sense. more useful test for 'cargo check' `cargo check` isn't used very consistently in tests, so when we use it we should be sure to actually try to use an artifact based feature to gain some coverage. verify that multiple versions are allowed for artifact deps as well. also: remove redundant test This is the commit message #2: ------------------------------ Properly choose which dependencies take part in artifact handling Previously it would include them very generously without considering the compatible dependency types. This is the commit message #3: ------------------------------ a more complex test which includes dev-dependencies It also shows that doc-tests don't yet work as rustdoc is run outside of the system into which we integrate right now. It should be possible to write our environment variable configuration in terms of this 'finished compilation' though, hopefully with most code reused. This is the commit message #4: ------------------------------ A first stab at storing artifact environment variables for packages… …however, it seems like the key for this isn't necessarily correct under all circumstances. Maybe it should be something more specific, don't know. This is the commit message #5: ------------------------------ Adjust key for identifying units to Metadata This one is actually unique and feels much better. This is the commit message #6: ------------------------------ Attempt to make use of artifact environment information… …but fail as the metadata won't match as the doctest unit is, of course, its separate unit. Now I wonder if its possible to find the artifact units in question that have the metadata. Properly use metadata to use artifact environment variables in doctests This is the commit message #2: ------------------------------ Add test for resolver = "2" and build dependencies Interestingly the 'host-features' flag must be set (as is seemingly documented in the flags documentation as well), even though I am not quite sure if this is the 100% correct solution. Should it rather have an entry with this flag being false in its map? Probably not… but I am not quite certain. This is the commit message #3: ------------------------------ set most if not all tests to use resolver = "2" This allows to keep it working with the most recent version while allowing to quickly test with "1" as well (which thus far was working fine). All tests I could imagine (excluding target and profiles) are working now Crossplatform tests now run on architecture aarm64 as well. 
More stringent negative testing Fix incorrect handling of dependency directory computation Previously it would just 'hack' the deps-dir to become something very different for artifacts. This could easily be fixed by putting the logic for artifact output directories into the right spot. A test for cargo-tree to indicate artifacts aren't handled specifically Assure build-scripts can't access artifacts at build time Actual doc-tests with access to artifact env vars All relevant parsing of `target = [..]` Next step is to actually take it into consideration. A failing test for adjusting the target for build script artifacts using --target Check for unknown artifact target triple in a place that exists for a year The first test showing that `target="target"` deps seemingly work For now only tested for build scripts, but it won't be much different for non-build dependencies. build scripts accept custom targets unconditionally Support target setting for non-build dependencies This is the commit message #2: ------------------------------ Add doc-test cross compile related test Even though there is no artifact code specific to doc testing, it's worth to try testing it with different target settings to validate it still works despite doc tests having some special caseing around target settings. This is the commit message #3: ------------------------------ A test to validate profiles work as expected for build-deps and non-build deps No change is required to make this work and artifact dependencies 'just work' based on the typical rules of their non-artifact counterarts. This is the commit message #4: ------------------------------ Adjust `cargo metadata` to deal with artifact dependencies This commit was squashed and there is probably more that changed. This is the commit message #5: ------------------------------ Show bin-only artifacts in "resolve" of metadata as well. This is the commit message #6: ------------------------------ minor refactoring during research for RFC-3176 This will soon need to return multiple extern-name/dep-name pairs. This is the commit message #7: ------------------------------ See if opt-level 3 works on win-msvc in basic profile test for artifacts This is the same value as is used in the other test of the same name, which certainly runs on windows. This is the commit message #8: ------------------------------ refactor Assure the type for targets reflect that they cannot be the host target, which removes a few unreachable!() expressions. Put `root_unit_compile_kind` into `UnitFor` Previously that wasn't done because of the unused `all_values()` method which has now been deleted as its not being used anyomre. This allows for the root unit compile kind to be passed as originally intended, instead of working around the previous lack of extendability of UnitFor due to ::all_values(). This is also the basis for better/correct feature handling once feature resolution can be depending on the artifact target as well, resulting in another extension to UnitFor for that matter. Also ---- - Fix ordering Previously the re-created target_mode was used due to the reordering in code, and who knows what kind of effects that might have (despite the test suite being OK with it). Let's put it back in place. 
- Deactivate test with filename collision on MSVC until RFC-3176 lands Avoid clashes with binaries called 'artifact' by putting 'artifact/' into './deps/' This commit addresses review comment https://github.com/rust-lang/cargo/pull/9992#discussion_r772939834 Don't rely on operator precedence for boolean operations Now it should be clear that no matter what the first term is, if the unit is an artifact, we should enqueue it. Replace boolean and `/*artifact*/ <bool>` with `IsArtifact::(Yes/No)` fix `doc::doc_lib_false()` test It broke due to major breakage in the way dependencies are calculated. Now we differentiate between deps computation for docs and for building. Avoid testing for doctest cross-compilation message It seems to be present on my machine, but isn't on linux and it's probably better to leave it out entirely and focus on the portions of consecutive output that we want to see at least. A test to validate features are unified across libraries and those in artifact deps in the same target Allow aarch64 MacOS to crosscompile to an easily executable alternative target That way more tests can run locally. Support for feature resolution per target The implementation is taken directly from RFC-3176 and notably lacks the 'multidep' part. Doing this definitely has the benefit of making entirely clear 'what is what' and helps to greatly reduce the scope of RFC-3176 when it's rebuilt based on the latest RF-3028, what we are implementing right now. Also ---- - A test which prooves that artifact deps with different target don't have a feature namespace yet - Add a test to validate features are namespaced by target Previously it didn't work because it relies on resolver = "2". - 'cargo metadata' test to see how artifact-deps are presented - Missed an opportunity for using the newly introduced `PackageFeaturesKey` - Use a HashMap to store name->value relations for artifact environment variables This is semantically closer to what's intended. also: Remove a by now misleading comment Prevent resolver crash if `target = "target"` is encountered in non-build dependencies A warning was emitted before, now we also apply a fix. Previously the test didn't fail as it accidentally used the old resolver, which now has been removed. Abort in parsing stage if nightly flag is not set and 'artifact' is used There is no good reason to delay errors to a later stage when code tries to use artifacts via environment variables which are not present. Change wording of warning message into what's expected for an error message remove unnecessary `Result` in `collect()` call Improve logic to warn if dependencie are ignored due to missing libraries The improvement here is to trigger correctly if any dependency of a crate is potentially a library, without having an actual library target as part of the package specification. Due to artifact dependencies it's also possible to have a dependency to the same crate of the same version, hence the package name isn't necessarily a unique name anymore. Now the name of the actual dependency in the toml file is used to alleviate this. Various small changes for readability and consistency A failing test to validate artifacts work in published crates as well Originally this should have been a test to see target acquisition works but this more pressing issue surfaced instead. 
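// Serialize artifact-dependency settings (`artifact` and the optional
// `target`) into the generated manifest entry for this dependency.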
if let Some((artifact, target)) = &dep.artifact {
manifest.push_str(&format!("artifact = \"{}\"\n", artifact));
if let Some(target) = &target {
manifest.push_str(&format!("target = \"{}\"\n", target))
}
}
if let Some(registry) = &dep.registry {
assert_eq!(registry, "alternative");
manifest.push_str(&format!("registry-index = \"{}\"", alt_registry_url()));
}
}
if self.proc_macro {
manifest.push_str("[lib]\nproc-macro = true\n");
}
self.append(ar, "Cargo.toml", DEFAULT_MODE, &manifest);
}
fn append<W: Write>(&self, ar: &mut Builder<W>, file: &str, mode: u32, contents: &str) {
self.append_raw(
ar,
&format!("{}-{}/{}", self.name, self.vers, file),
mode,
contents,
);
}
fn append_raw<W: Write>(&self, ar: &mut Builder<W>, path: &str, mode: u32, contents: &str) {
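// Build a ustar tar header for the in-memory contents, then append the
// header and data to the archive being written.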
let mut header = Header::new_ustar();
header.set_size(contents.len() as u64);
t!(header.set_path(path));
header.set_mode(mode);
header.set_cksum();
t!(ar.append(&header, contents.as_bytes()));
}
/// Returns the path to the compressed package file.
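///
/// Depending on how the package was configured, this is
/// `registry/{name}-{vers}.crate` for local-registry packages,
/// `alt_dl/{name}/{vers}/{name}-{vers}.crate` for the alternative registry,
/// or `dl/{name}/{vers}/download` otherwise (all relative to the test root).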
pub fn archive_dst(&self) -> PathBuf {
if self.local {
registry_path().join(format!("{}-{}.crate", self.name, self.vers))
} else if self.alternative {
alt_dl_path()
.join(&self.name)
.join(&self.vers)
.join(&format!("{}-{}.crate", self.name, self.vers))
} else {
dl_path().join(&self.name).join(&self.vers).join("download")
}
}
}
pub fn cksum(s: &[u8]) -> String {
Sha256::new().update(s).finish_hex()
}
impl Dependency {
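/// Creates a new dependency on `name` with the given version requirement.
///
/// It defaults to a plain `[dependencies]` entry; the builder methods below
/// adjust the kind, target, registry, and so on. A minimal, illustrative
/// sketch (the crate name `bar` is made up):
///
/// ```ignore
/// let mut dep = Dependency::new("bar", "1.0.0");
/// // Turn it into a `[build-dependencies]` entry limited to one target.
/// dep.build().target("cfg(unix)");
/// ```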
pub fn new(name: &str, vers: &str) -> Dependency {
Dependency {
name: name.to_string(),
vers: vers.to_string(),
kind: "normal".to_string(),
artifact: None,
target: None,
features: Vec::new(),
package: None,
optional: false,
registry: None,
}
}
/// Changes this to `[build-dependencies]`.
pub fn build(&mut self) -> &mut Self {
self.kind = "build".to_string();
self
}
/// Changes this to `[dev-dependencies]`.
pub fn dev(&mut self) -> &mut Self {
self.kind = "dev".to_string();
self
}
/// Changes this to `[target.$target.dependencies]`.
pub fn target(&mut self, target: &str) -> &mut Self {
self.target = Some(target.to_string());
self
}
/// Changes the artifact to be of the given kind, such as `"bin"` or `"staticlib"`,
/// optionally for the given target triple.
pub fn artifact(&mut self, kind: &str, target: Option<String>) -> &mut Self {
self.artifact = Some((kind.to_string(), target));
self
}
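    // A minimal usage sketch (not an actual test from the suite): assuming the
    // `Package` and `Dependency` helpers from this module, an artifact
    // dependency could be declared on a published test package like this:
    //
    //     Package::new("bar", "1.0.0").publish();
    //     Package::new("foo", "0.0.1")
    //         .add_dep(Dependency::new("bar", "1.0.0").artifact("bin", None))
    //         .publish();
    //
    // Passing `Some("x86_64-unknown-linux-gnu".to_string())` as the target
    // would additionally pin the artifact to that triple.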
/// Adds `registry = $registry` to this dependency.
pub fn registry(&mut self, registry: &str) -> &mut Self {
self.registry = Some(registry.to_string());
self
}
/// Adds `features = [ ... ]` to this dependency.
pub fn enable_features(&mut self, features: &[&str]) -> &mut Self {
self.features.extend(features.iter().map(|s| s.to_string()));
self
}
/// Adds `package = ...` to this dependency.
pub fn package(&mut self, pkg: &str) -> &mut Self {
self.package = Some(pkg.to_string());
self
}
/// Changes this to an optional dependency.
pub fn optional(&mut self, optional: bool) -> &mut Self {
self.optional = optional;
self
}
}
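// Illustrative sketch of combining the dependency setters above when
// publishing a test package (assumes the `Package` and `Dependency` helpers
// from this module; the feature name `serde` is just an example):
//
//     Package::new("foo", "0.0.1")
//         .add_dep(
//             Dependency::new("bar-renamed", "1.0.0")
//                 .package("bar")
//                 .registry("alternative")
//                 .enable_features(&["serde"])
//                 .optional(true),
//         )
//         .publish();
//
// The resulting index entry names the dependency `bar-renamed`, records
// `package = "bar"`, points it at the "alternative" registry, enables its
// `serde` feature, and marks it optional.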
/// Splits a feature map into the plain `features` map and an optional second
/// map holding the entries that use the newer `dep:` or `?/` syntax (the
/// index's `features2` field); the second map is `None` if no such entries
/// exist.
fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option<FeatureMap>) {
let mut features2 = FeatureMap::new();
for (feat, values) in features.iter_mut() {
if values
.iter()
.any(|value| value.starts_with("dep:") || value.contains("?/"))
{
let new_values = values.drain(..).collect();
features2.insert(feat.clone(), new_values);
}
}
if features2.is_empty() {
(features, None)
} else {
(features, Some(features2))
}
}
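// A minimal test sketch for the split above (not part of the actual suite);
// assumes `FeatureMap` is the `BTreeMap<String, Vec<String>>` alias declared
// earlier in this file.
#[cfg(test)]
mod split_index_features_sketch {
    use super::*;

    #[test]
    fn new_syntax_moves_to_features2() {
        let mut map = FeatureMap::new();
        map.insert("std".to_string(), vec!["dep:libc".to_string()]);
        map.insert("extra".to_string(), vec!["std".to_string()]);

        let (features, features2) = split_index_features(map);

        // `std` keeps its key, but its values were drained into the second map.
        assert_eq!(features["std"], Vec::<String>::new());
        assert_eq!(features["extra"], vec!["std".to_string()]);
        assert_eq!(features2.unwrap()["std"], vec!["dep:libc".to_string()]);
    }
}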