Basic standard library support.

Eric Huss 2019-08-01 09:11:22 -07:00
parent 9a8d695b5b
commit 1f14fa3172
23 changed files with 1228 additions and 307 deletions

View File

@ -16,6 +16,13 @@ steps:
- bash: rustup component add clippy || echo "clippy not available"
displayName: "Install clippy (maybe)"
# This is needed for standard library tests.
- bash: |
if [ "$TOOLCHAIN" = "nightly" ]; then
rustup component add rust-src
fi
displayName: "Install rust-src (maybe)"
# Deny warnings on CI to avoid warnings getting into the codebase, and note the
# `force-system-lib-on-osx` feature, which is intended to fix compile issues on
# OSX where compiling curl from source yields linker errors on Azure.

View File

@ -221,6 +221,14 @@ impl CompileMode {
}
}
/// Returns `true` if this is something that passes `--test` to rustc.
pub fn is_rustc_test(self) -> bool {
match self {
CompileMode::Test | CompileMode::Bench | CompileMode::Check { test: true } => true,
_ => false,
}
}
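For orientation, the new predicate expressed as hypothetical assertions (not part of the commit; `CompileMode` variants as defined in this file):
assert!(CompileMode::Test.is_rustc_test());
assert!(CompileMode::Bench.is_rustc_test());
assert!(CompileMode::Check { test: true }.is_rustc_test());
assert!(!CompileMode::Check { test: false }.is_rustc_test());
assert!(!CompileMode::Build.is_rustc_test());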
/// Returns `true` if this is the *execution* of a `build.rs` script.
pub fn is_run_custom_build(self) -> bool {
self == CompileMode::RunCustomBuild

View File

@ -8,7 +8,7 @@ use crate::core::compiler::unit::UnitInterner;
use crate::core::compiler::{BuildConfig, BuildOutput, Kind, Unit};
use crate::core::profiles::Profiles;
use crate::core::{Dependency, Workspace};
use crate::core::{PackageId, PackageSet, Resolve};
use crate::core::{PackageId, PackageSet};
use crate::util::errors::CargoResult;
use crate::util::{profile, Cfg, Config, Rustc};
@ -26,8 +26,6 @@ pub struct BuildContext<'a, 'cfg> {
pub ws: &'a Workspace<'cfg>,
/// The cargo configuration.
pub config: &'cfg Config,
/// The dependency graph for our build.
pub resolve: &'a Resolve,
pub profiles: &'a Profiles,
pub build_config: &'a BuildConfig,
/// Extra compiler args for either `rustc` or `rustdoc`.
@ -48,7 +46,6 @@ pub struct BuildContext<'a, 'cfg> {
impl<'a, 'cfg> BuildContext<'a, 'cfg> {
pub fn new(
ws: &'a Workspace<'cfg>,
resolve: &'a Resolve,
packages: &'a PackageSet<'cfg>,
config: &'cfg Config,
build_config: &'a BuildConfig,
@ -75,7 +72,6 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
Ok(BuildContext {
ws,
resolve,
packages,
config,
rustc,
@ -90,16 +86,6 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
})
}
pub fn extern_crate_name(&self, unit: &Unit<'a>, dep: &Unit<'a>) -> CargoResult<String> {
self.resolve
.extern_crate_name(unit.pkg.package_id(), dep.pkg.package_id(), dep.target)
}
pub fn is_public_dependency(&self, unit: &Unit<'a>, dep: &Unit<'a>) -> bool {
self.resolve
.is_public_dep(unit.pkg.package_id(), dep.pkg.package_id())
}
/// Whether a dependency should be compiled for the host or target platform,
/// specified by `Kind`.
pub fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool {

View File

@ -6,7 +6,7 @@ use std::path::PathBuf;
use semver::Version;
use super::BuildContext;
use crate::core::{Edition, Package, PackageId, Target};
use crate::core::{Edition, InternedString, Package, PackageId, Target};
use crate::util::{self, join_paths, process, CargoResult, CfgExpr, Config, ProcessBuilder};
pub struct Doctest {
@ -16,7 +16,7 @@ pub struct Doctest {
pub target: Target,
/// Extern dependencies needed by `rustdoc`. The path is the location of
/// the compiled lib.
pub deps: Vec<(String, PathBuf)>,
pub deps: Vec<(InternedString, PathBuf)>,
}
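These pairs become `--extern NAME=PATH` arguments when rustdoc is invoked; a minimal self-contained sketch of that consumption (mirroring the `run_doc_tests` change later in this diff, with `String` standing in for `InternedString`):
use std::ffi::OsString;
use std::path::PathBuf;
// Turn (extern_crate_name, lib_path) pairs into rustdoc `--extern` args.
fn extern_args(deps: &[(String, PathBuf)]) -> Vec<OsString> {
    let mut args = Vec::new();
    for (name, lib) in deps {
        let mut arg = OsString::from(name.as_str());
        arg.push("=");
        arg.push(lib);
        args.push(OsString::from("--extern"));
        args.push(arg);
    }
    args
}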
/// A structure returning the result of a compilation.

View File

@ -1,7 +1,6 @@
#![allow(deprecated)]
use std::collections::{BTreeSet, HashMap, HashSet};
use std::ffi::OsStr;
use std::fmt::Write;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
@ -10,7 +9,7 @@ use jobserver::Client;
use crate::core::compiler::compilation;
use crate::core::compiler::Unit;
use crate::core::{PackageId, Resolve};
use crate::core::PackageId;
use crate::util::errors::{CargoResult, CargoResultExt};
use crate::util::{internal, profile, Config};
@ -19,6 +18,7 @@ use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
use super::fingerprint::Fingerprint;
use super::job_queue::JobQueue;
use super::layout::Layout;
use super::unit_dependencies::{UnitDep, UnitGraph};
use super::{BuildContext, Compilation, CompileMode, Executor, FileFlavor, Kind};
mod compilation_files;
@ -48,9 +48,6 @@ pub struct Context<'a, 'cfg> {
/// Linking information for each `Unit`.
/// See `build_map` for details.
pub build_scripts: HashMap<Unit<'a>, Arc<BuildScripts>>,
/// Used to check the `links` field in the manifest is not duplicated and
/// is used correctly.
pub links: Links,
/// Job server client to manage concurrency with other processes.
pub jobserver: Client,
/// "Primary" packages are the ones the user selected on the command-line
@ -58,7 +55,7 @@ pub struct Context<'a, 'cfg> {
/// based on the current directory and the default workspace members.
primary_packages: HashSet<PackageId>,
/// The dependency graph of units to compile.
unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>,
unit_dependencies: UnitGraph<'a>,
/// An abstraction of the files and directories that will be generated by
/// the compilation. This is `None` until after `unit_dependencies` has
/// been computed.
@ -80,7 +77,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
pub fn new(
config: &'cfg Config,
bcx: &'a BuildContext<'a, 'cfg>,
unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>,
unit_dependencies: UnitGraph<'a>,
) -> CargoResult<Self> {
// Load up the jobserver that we'll use to manage our parallelism. This
// is the same as the GNU make implementation of a jobserver, and
@ -111,7 +108,6 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
compiled: HashSet::new(),
build_scripts: HashMap::new(),
build_explicit_deps: HashMap::new(),
links: Links::new(),
jobserver,
primary_packages: HashSet::new(),
unit_dependencies,
@ -201,18 +197,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// pass `--extern` for rlib deps and skip out on all other
// artifacts.
let mut doctest_deps = Vec::new();
for dep in self.dep_targets(unit) {
if dep.target.is_lib() && dep.mode == CompileMode::Build {
let outputs = self.outputs(&dep)?;
for dep in self.unit_deps(unit) {
if dep.unit.target.is_lib() && dep.unit.mode == CompileMode::Build {
let outputs = self.outputs(&dep.unit)?;
let outputs = outputs.iter().filter(|output| {
output.path.extension() == Some(OsStr::new("rlib"))
|| dep.target.for_host()
|| dep.unit.target.for_host()
});
for output in outputs {
doctest_deps.push((
self.bcx.extern_crate_name(unit, &dep)?,
output.path.clone(),
));
doctest_deps.push((dep.extern_crate_name, output.path.clone()));
}
}
}
@ -356,10 +349,19 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// For a package, return all targets which are registered as dependencies
/// for that package.
/// NOTE: This is deprecated, use `unit_deps` instead.
//
// TODO: this ideally should be `-> &[Unit<'a>]`.
pub fn dep_targets(&self, unit: &Unit<'a>) -> Vec<Unit<'a>> {
self.unit_dependencies[unit].clone()
self.unit_dependencies[unit]
.iter()
.map(|dep| dep.unit)
.collect()
}
/// Direct dependencies for the given unit.
pub fn unit_deps(&self, unit: &Unit<'a>) -> &[UnitDep<'a>] {
&self.unit_dependencies[unit]
}
pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool {
@ -369,12 +371,9 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// Returns the list of filenames read by cargo to generate the `BuildContext`
/// (all `Cargo.toml`, etc.).
pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
// Keep sorted for consistency.
let mut inputs = BTreeSet::new();
// Note that we're using the `package_cache`, which should have been
// populated by `build_unit_dependencies`, and only those packages are
// considered as all the inputs.
//
// (Notably, we skip dev-deps here if they aren't present.)
// Note: dev-deps are skipped if they are not present in the unit graph.
for unit in self.unit_dependencies.keys() {
inputs.insert(unit.pkg.manifest_path().to_path_buf());
}
@ -485,8 +484,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
fn record_units_requiring_metadata(&mut self) {
for (key, deps) in self.unit_dependencies.iter() {
for dep in deps {
if self.only_requires_rmeta(key, dep) {
self.rmeta_required.insert(*dep);
if self.only_requires_rmeta(key, &dep.unit) {
self.rmeta_required.insert(dep.unit);
}
}
}
@ -513,70 +512,3 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
self.rmeta_required.contains(unit)
}
}
#[derive(Default)]
pub struct Links {
validated: HashSet<PackageId>,
links: HashMap<String, PackageId>,
}
impl Links {
pub fn new() -> Links {
Links {
validated: HashSet::new(),
links: HashMap::new(),
}
}
pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'_>) -> CargoResult<()> {
if !self.validated.insert(unit.pkg.package_id()) {
return Ok(());
}
let lib = match unit.pkg.manifest().links() {
Some(lib) => lib,
None => return Ok(()),
};
if let Some(&prev) = self.links.get(lib) {
let pkg = unit.pkg.package_id();
let describe_path = |pkgid: PackageId| -> String {
let dep_path = resolve.path_to_top(&pkgid);
let mut dep_path_desc = format!("package `{}`", dep_path[0]);
for dep in dep_path.iter().skip(1) {
write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap();
}
dep_path_desc
};
failure::bail!(
"multiple packages link to native library `{}`, \
but a native library can be linked only once\n\
\n\
{}\nlinks to native library `{}`\n\
\n\
{}\nalso links to native library `{}`",
lib,
describe_path(prev),
lib,
describe_path(pkg),
lib
)
}
if !unit
.pkg
.manifest()
.targets()
.iter()
.any(|t| t.is_custom_build())
{
failure::bail!(
"package `{}` specifies that it links to `{}` but does not \
have a custom build script",
unit.pkg.package_id(),
lib
)
}
self.links.insert(lib.to_string(), unit.pkg.package_id());
Ok(())
}
}
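Call sites migrating from the deprecated `dep_targets` to the richer `unit_deps` change roughly as follows (a hypothetical sketch inside a function returning `CargoResult`; `cx` is a `Context` as above):
// Before: only the dependency units were available, and the extern crate
// name had to be looked up through the resolver on every call.
for dep_unit in cx.dep_targets(unit) {
    let name = cx.bcx.extern_crate_name(unit, &dep_unit)?;
    // ...
}
// After: each edge carries its metadata directly.
for dep in cx.unit_deps(unit) {
    let name = dep.extern_crate_name; // precomputed InternedString
    let _ = (dep.unit, dep.public, dep.unit_for);
    // ...
}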

View File

@ -201,7 +201,8 @@ use serde::de;
use serde::ser;
use serde::{Deserialize, Serialize};
use crate::core::Package;
use crate::core::compiler::unit_dependencies::UnitDep;
use crate::core::{InternedString, Package};
use crate::util;
use crate::util::errors::{CargoResult, CargoResultExt};
use crate::util::paths;
@ -320,7 +321,7 @@ struct DepFingerprint {
pkg_id: u64,
/// The crate name we're using for this dependency, which if we change we'll
/// need to recompile!
name: String,
name: InternedString,
/// Whether or not this dependency is flagged as a public dependency or not.
public: bool,
/// Whether or not this dependency is an rmeta dependency or a "full"
@ -446,7 +447,7 @@ impl<'de> Deserialize<'de> for DepFingerprint {
let (pkg_id, name, public, hash) = <(u64, String, bool, u64)>::deserialize(d)?;
Ok(DepFingerprint {
pkg_id,
name,
name: InternedString::new(&name),
public,
fingerprint: Arc::new(Fingerprint {
memoized_hash: Mutex::new(Some(hash)),
@ -932,12 +933,9 @@ impl DepFingerprint {
fn new<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
parent: &Unit<'a>,
dep: &Unit<'a>,
dep: &UnitDep<'a>,
) -> CargoResult<DepFingerprint> {
let fingerprint = calculate(cx, dep)?;
let name = cx.bcx.extern_crate_name(parent, dep)?;
let public = cx.bcx.is_public_dependency(parent, dep);
let fingerprint = calculate(cx, &dep.unit)?;
// We need to be careful about what we hash here. We have a goal of
// supporting renaming a project directory and not rebuilding
// everything. To do that, however, we need to make sure that the cwd
@ -948,18 +946,18 @@ impl DepFingerprint {
// names (sort of for this same reason), so if the package source is a
// `path` then we just hash the name, but otherwise we hash the full
// id as it won't change when the directory is renamed.
let pkg_id = if dep.pkg.package_id().source_id().is_path() {
util::hash_u64(dep.pkg.package_id().name())
let pkg_id = if dep.unit.pkg.package_id().source_id().is_path() {
util::hash_u64(dep.unit.pkg.package_id().name())
} else {
util::hash_u64(dep.pkg.package_id())
util::hash_u64(dep.unit.pkg.package_id())
};
Ok(DepFingerprint {
pkg_id,
name,
public,
name: dep.extern_crate_name,
public: dep.public,
fingerprint,
only_requires_rmeta: cx.only_requires_rmeta(parent, dep),
only_requires_rmeta: cx.only_requires_rmeta(parent, &dep.unit),
})
}
}
@ -1039,11 +1037,13 @@ fn calculate_normal<'a, 'cfg>(
// Skip fingerprints of binaries because they don't actually induce a
// recompile; they're just dependencies in the sense that they need to be
// built.
let mut deps = cx
.dep_targets(unit)
.iter()
.filter(|u| !u.target.is_bin())
.map(|dep| DepFingerprint::new(cx, unit, dep))
//
// Create Vec since mutable cx is needed in closure.
let deps = Vec::from(cx.unit_deps(unit));
let mut deps = deps
.into_iter()
.filter(|dep| !dep.unit.target.is_bin())
.map(|dep| DepFingerprint::new(cx, unit, &dep))
.collect::<CargoResult<Vec<_>>>()?;
deps.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
@ -1132,9 +1132,10 @@ fn calculate_run_custom_build<'a, 'cfg>(
// Overridden build scripts don't need to track deps.
vec![]
} else {
cx.dep_targets(unit)
.iter()
.map(|dep| DepFingerprint::new(cx, unit, dep))
// Create Vec since mutable cx is needed in closure.
let deps = Vec::from(cx.unit_deps(unit));
deps.into_iter()
.map(|dep| DepFingerprint::new(cx, unit, &dep))
.collect::<CargoResult<Vec<_>>>()?
};
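A standalone sketch of the path-vs-registry hashing rule described above (hypothetical helper; std's `DefaultHasher` stands in for cargo's `util::hash_u64`):
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
// Path dependencies hash only the package name, so renaming the project
// directory does not invalidate fingerprints; registry packages hash the
// full package id, which includes the source.
fn dep_pkg_hash(name: &str, full_id: &str, is_path_source: bool) -> u64 {
    let mut hasher = DefaultHasher::new();
    if is_path_source {
        name.hash(&mut hasher);
    } else {
        full_id.hash(&mut hasher);
    }
    hasher.finish()
}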

View File

@ -0,0 +1,59 @@
use super::unit_dependencies::UnitGraph;
use crate::core::{PackageId, Resolve};
use crate::util::errors::CargoResult;
use std::collections::{HashMap, HashSet};
use std::fmt::Write;
/// Validate `links` field does not conflict between packages.
pub fn validate_links(resolve: &Resolve, unit_graph: &UnitGraph<'_>) -> CargoResult<()> {
// NOTE: This is the *old* links validator. Links are usually validated in
// the resolver. However, the `links` field was only added to the index in
// early 2018 (see https://github.com/rust-lang/cargo/pull/4978), while
// `links` itself has been around since 2014, so there are still many crates
// in the index that don't have `links` properly set (over 600 at the time
// of this writing in 2019). This can probably be removed at some point in
// the future, though it might be worth considering fixing the index.
let mut validated: HashSet<PackageId> = HashSet::new();
let mut links: HashMap<String, PackageId> = HashMap::new();
let mut units: Vec<_> = unit_graph.keys().collect();
// Sort primarily to make testing easier.
units.sort_unstable();
for unit in units {
if !validated.insert(unit.pkg.package_id()) {
continue;
}
let lib = match unit.pkg.manifest().links() {
Some(lib) => lib,
None => continue,
};
if let Some(&prev) = links.get(lib) {
let pkg = unit.pkg.package_id();
let describe_path = |pkgid: PackageId| -> String {
let dep_path = resolve.path_to_top(&pkgid);
let mut dep_path_desc = format!("package `{}`", dep_path[0]);
for dep in dep_path.iter().skip(1) {
write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap();
}
dep_path_desc
};
failure::bail!(
"multiple packages link to native library `{}`, \
but a native library can be linked only once\n\
\n\
{}\nlinks to native library `{}`\n\
\n\
{}\nalso links to native library `{}`",
lib,
describe_path(prev),
lib,
describe_path(pkg),
lib
)
}
links.insert(lib.to_string(), unit.pkg.package_id());
}
Ok(())
}

View File

@ -8,9 +8,11 @@ mod fingerprint;
mod job;
mod job_queue;
mod layout;
mod links;
mod output_depinfo;
pub mod standard_lib;
mod unit;
mod unit_dependencies;
pub mod unit_dependencies;
use std::env;
use std::ffi::{OsStr, OsString};
@ -36,7 +38,7 @@ use self::job::{Job, Work};
use self::job_queue::{JobQueue, JobState};
pub use self::layout::is_bad_artifact_name;
use self::output_depinfo::output_depinfo;
pub use self::unit_dependencies::build_unit_dependencies;
use self::unit_dependencies::UnitDep;
pub use crate::core::compiler::unit::{Unit, UnitInterner};
use crate::core::manifest::TargetSourcePath;
use crate::core::profiles::{Lto, PanicStrategy, Profile};
@ -123,7 +125,6 @@ fn compile<'a, 'cfg: 'a>(
// we've got everything constructed.
let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name()));
fingerprint::prepare_init(cx, unit)?;
cx.links.validate(bcx.resolve, unit)?;
let job = if unit.mode.is_run_custom_build() {
custom_build::prepare(cx, unit)?
@ -987,38 +988,39 @@ fn build_deps_args<'a, 'cfg>(
});
}
let dep_targets = cx.dep_targets(unit);
// Create Vec since mutable cx is needed in closure below.
let deps = Vec::from(cx.unit_deps(unit));
// If a dependency should provide a linkable target but does not, rustc
// fails later on when it encounters an `extern crate` for it. This may
// turn into a hard error in the future (see PR #4797).
if !dep_targets
if !deps
.iter()
.any(|u| !u.mode.is_doc() && u.target.linkable())
.any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.linkable())
{
if let Some(u) = dep_targets
if let Some(dep) = deps
.iter()
.find(|u| !u.mode.is_doc() && u.target.is_lib())
.find(|dep| !dep.unit.mode.is_doc() && dep.unit.target.is_lib())
{
bcx.config.shell().warn(format!(
"The package `{}` \
provides no linkable target. The compiler might raise an error while compiling \
`{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
Cargo.toml. This warning might turn into a hard error in the future.",
u.target.crate_name(),
dep.unit.target.crate_name(),
unit.target.crate_name(),
u.target.crate_name()
dep.unit.target.crate_name()
))?;
}
}
let mut unstable_opts = false;
for dep in dep_targets {
if dep.mode.is_run_custom_build() {
cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep));
for dep in deps {
if dep.unit.mode.is_run_custom_build() {
cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep.unit));
}
if dep.target.linkable() && !dep.mode.is_doc() {
if dep.unit.target.linkable() && !dep.unit.mode.is_doc() {
link_to(cmd, cx, unit, &dep, &mut unstable_opts)?;
}
}
@ -1035,13 +1037,11 @@ fn build_deps_args<'a, 'cfg>(
cmd: &mut ProcessBuilder,
cx: &mut Context<'a, 'cfg>,
current: &Unit<'a>,
dep: &Unit<'a>,
dep: &UnitDep<'a>,
need_unstable_opts: &mut bool,
) -> CargoResult<()> {
let bcx = cx.bcx;
let mut value = OsString::new();
value.push(bcx.extern_crate_name(current, dep)?);
value.push(dep.extern_crate_name.as_str());
value.push("=");
let mut pass = |file| {
@ -1054,7 +1054,7 @@ fn build_deps_args<'a, 'cfg>(
.features()
.require(Feature::public_dependency())
.is_ok()
&& !bcx.is_public_dependency(current, dep)
&& !dep.public
{
cmd.arg("--extern-private");
*need_unstable_opts = true;
@ -1065,13 +1065,13 @@ fn build_deps_args<'a, 'cfg>(
cmd.arg(&value);
};
let outputs = cx.outputs(dep)?;
let outputs = cx.outputs(&dep.unit)?;
let mut outputs = outputs.iter().filter_map(|output| match output.flavor {
FileFlavor::Linkable { rmeta } => Some((output, rmeta)),
_ => None,
});
if cx.only_requires_rmeta(current, dep) {
if cx.only_requires_rmeta(current, &dep.unit) {
let (output, _rmeta) = outputs
.find(|(_output, rmeta)| *rmeta)
.expect("failed to find rlib dep for pipelined dep");

View File

@ -0,0 +1,160 @@
//! Code for building the standard library.
use crate::core::compiler::{BuildContext, CompileMode, Kind, Unit};
use crate::core::profiles::UnitFor;
use crate::core::resolver::ResolveOpts;
use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace};
use crate::ops::{self, Packages};
use crate::util::errors::CargoResult;
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;
/// Parse the `-Zbuild-std` flag.
pub fn parse_unstable_flag(value: Option<&str>) -> Vec<String> {
// This is a temporary hack until there is a more principled way to
// declare dependencies in Cargo.toml.
let value = value.unwrap_or("std");
let mut crates: HashSet<&str> = value.split(',').collect();
if crates.contains("std") {
crates.insert("core");
crates.insert("panic_unwind");
crates.insert("compiler_builtins");
} else if crates.contains("core") {
crates.insert("compiler_builtins");
}
crates.into_iter().map(|s| s.to_string()).collect()
}
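A hypothetical unit test pinning down the implicit-crate expansion above (not part of the commit):
#[test]
fn build_std_flag_expansion() {
    // `core` implies `compiler_builtins`.
    let mut crates = parse_unstable_flag(Some("core"));
    crates.sort();
    assert_eq!(crates, vec!["compiler_builtins", "core"]);
    // No value defaults to "std", which pulls in its implicit companions.
    let mut crates = parse_unstable_flag(None);
    crates.sort();
    assert_eq!(
        crates,
        vec!["compiler_builtins", "core", "panic_unwind", "std"]
    );
}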
/// Resolve the standard library dependencies.
pub fn resolve_std<'cfg>(
ws: &Workspace<'cfg>,
crates: &[String],
) -> CargoResult<(PackageSet<'cfg>, Resolve)> {
let src_path = detect_sysroot_src_path(ws)?;
let mut patch = HashMap::new();
let crates_io_url = crate::sources::CRATES_IO_INDEX.parse().unwrap();
// rustc-std-workspace-core = { path = 'src/tools/rustc-std-workspace-core' }
let source_path_core =
SourceId::for_path(&src_path.join("src/tools/rustc-std-workspace-core"))?;
let core = Dependency::parse_no_deprecated("rustc-std-workspace-core", None, source_path_core)?;
// rustc-std-workspace-alloc = { path = 'src/tools/rustc-std-workspace-alloc' }
let source_path_alloc =
SourceId::for_path(&src_path.join("src/tools/rustc-std-workspace-alloc"))?;
let alloc =
Dependency::parse_no_deprecated("rustc-std-workspace-alloc", None, source_path_alloc)?;
patch.insert(crates_io_url, vec![core, alloc]);
let members = vec![
String::from("src/libstd"),
String::from("src/libcore"),
String::from("src/liballoc"),
String::from("src/libtest"),
];
let ws_config = crate::core::WorkspaceConfig::Root(crate::core::WorkspaceRootConfig::new(
&src_path,
&Some(members),
/*default_members*/ &None,
/*exclude*/ &None,
));
let virtual_manifest = crate::core::VirtualManifest::new(
/*replace*/ Vec::new(),
patch,
ws_config,
// Profiles are not used here, but we need something to pass in.
ws.profiles().clone(),
crate::core::Features::default(),
);
let config = ws.config();
// This is a delicate hack. In order for features to resolve correctly,
// the resolver needs to be run with a specific "current" member of the
// workspace. Thus, in order to set the features for `std`, we need to set
// `std` to be the "current" member. Since none of the other crates need to
// alter their features, this should be fine, for now. Perhaps in the
// future, features will be decoupled from the resolver and it will be
// easier to control feature selection.
let current_manifest = src_path.join("src/libstd/Cargo.toml");
// TODO: Consider setting require_option_deps false?
// TODO: Consider doing something to enforce --locked? Or to prevent the
// lock file from being written, such as setting ephemeral.
let std_ws = Workspace::new_virtual(src_path, current_manifest, virtual_manifest, config)?;
// `test` is not in the default set because it is optional, but it needs
// to be part of the resolve in case we do need it.
let mut spec_pkgs = Vec::from(crates);
spec_pkgs.push("test".to_string());
let spec = Packages::Packages(spec_pkgs);
let specs = spec.to_package_id_specs(&std_ws)?;
let features = vec!["panic-unwind".to_string(), "backtrace".to_string()];
// dev_deps setting shouldn't really matter here.
let opts = ResolveOpts::new(
/*dev_deps*/ false, &features, /*all_features*/ false,
/*uses_default_features*/ true,
);
let resolve = ops::resolve_ws_with_opts(&std_ws, opts, &specs)?;
Ok(resolve)
}
/// Generate a list of root `Unit`s for the standard library.
///
/// The given slice of crate names is the root set.
pub fn generate_std_roots<'a>(
bcx: &BuildContext<'a, '_>,
crates: &[String],
std_resolve: &'a Resolve,
) -> CargoResult<Vec<Unit<'a>>> {
// Generate the root Units for the standard library.
let std_ids = crates
.iter()
.map(|crate_name| std_resolve.query(crate_name))
.collect::<CargoResult<Vec<PackageId>>>()?;
// Convert PackageId to Package.
let std_pkgs = bcx.packages.get_many(std_ids)?;
// Generate a list of Units.
std_pkgs
.into_iter()
.map(|pkg| {
let lib = pkg
.targets()
.iter()
.find(|t| t.is_lib())
.expect("std has a lib");
let unit_for = UnitFor::new_normal();
// I don't think we need to bother with Check here; the difference
// in time is minimal, and the difference in caching is
// significant.
let mode = CompileMode::Build;
let profile = bcx.profiles.get_profile(
pkg.package_id(),
/*is_member*/ false,
unit_for,
mode,
bcx.build_config.release,
);
let features = std_resolve.features_sorted(pkg.package_id());
Ok(bcx
.units
.intern(pkg, lib, profile, Kind::Target, mode, features))
})
.collect::<CargoResult<Vec<_>>>()
}
fn detect_sysroot_src_path(ws: &Workspace<'_>) -> CargoResult<PathBuf> {
// NOTE: This is temporary until we figure out how to acquire the source.
// If we decide to keep the sysroot probe, then BuildConfig will need to
// be restructured so that the TargetInfo is created earlier and passed
// in, so we don't have this extra call to rustc.
let rustc = ws.config().load_global_rustc(Some(ws))?;
let output = rustc.process().arg("--print=sysroot").exec_with_output()?;
let s = String::from_utf8(output.stdout)
.map_err(|e| failure::format_err!("rustc didn't return utf8 output: {:?}", e))?;
let sysroot = PathBuf::from(s.trim());
let src_path = sysroot.join("lib").join("rustlib").join("src").join("rust");
let lock = src_path.join("Cargo.lock");
if !lock.exists() {
failure::bail!(
"{:?} does not exist, unable to build with the standard \
library, try:\n rustup component add rust-src",
lock
);
}
Ok(src_path)
}

View File

@ -103,6 +103,7 @@ impl<'a> fmt::Debug for Unit<'a> {
.field("profile", &self.profile)
.field("kind", &self.kind)
.field("mode", &self.mode)
.field("features", &self.features)
.finish()
}
}

View File

@ -19,32 +19,254 @@ use crate::core::compiler::Unit;
use crate::core::compiler::{BuildContext, CompileMode, Kind};
use crate::core::dependency::Kind as DepKind;
use crate::core::package::Downloads;
use crate::core::profiles::UnitFor;
use crate::core::{Package, PackageId, Target};
use crate::core::profiles::{Profile, UnitFor};
use crate::core::resolver::Resolve;
use crate::core::{InternedString, Package, PackageId, Target};
use crate::CargoResult;
use log::trace;
use std::collections::{HashMap, HashSet};
/// The dependency graph of Units.
pub type UnitGraph<'a> = HashMap<Unit<'a>, Vec<UnitDep<'a>>>;
/// A unit dependency.
#[derive(Debug, Clone, Hash, Eq, PartialEq, PartialOrd, Ord)]
pub struct UnitDep<'a> {
/// The dependency unit.
pub unit: Unit<'a>,
/// The purpose of this dependency (a dependency for a test, or a build
/// script, etc.).
pub unit_for: UnitFor,
/// The name the parent uses to refer to this dependency.
pub extern_crate_name: InternedString,
/// Whether or not this is a public dependency.
pub public: bool,
}
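As a quick orientation for the new types, a traversal over a finished graph might look like this (sketch only; assumes the `Display` impls on `Package` and `InternedString`):
// Dump every edge along with the name the parent uses to refer to it.
fn dump_graph(graph: &UnitGraph<'_>) {
    for (unit, deps) in graph {
        for dep in deps {
            println!(
                "{} -> {} (extern crate {}, public: {})",
                unit.pkg, dep.unit.pkg, dep.extern_crate_name, dep.public
            );
        }
    }
}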
/// Collection of stuff used while creating the `UnitGraph`.
struct State<'a, 'cfg> {
bcx: &'a BuildContext<'a, 'cfg>,
waiting_on_download: HashSet<PackageId>,
downloads: Downloads<'a, 'cfg>,
unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>,
unit_dependencies: UnitGraph<'a>,
package_cache: HashMap<PackageId, &'a Package>,
usr_resolve: &'a Resolve,
std_resolve: Option<&'a Resolve>,
/// This flag is `true` while generating the dependencies for the standard
/// library.
is_std: bool,
}
pub fn build_unit_dependencies<'a, 'cfg>(
bcx: &'a BuildContext<'a, 'cfg>,
resolve: &'a Resolve,
std_resolve: Option<&'a Resolve>,
roots: &[Unit<'a>],
) -> CargoResult<HashMap<Unit<'a>, Vec<Unit<'a>>>> {
std_roots: &[Unit<'a>],
) -> CargoResult<UnitGraph<'a>> {
let mut state = State {
bcx,
downloads: bcx.packages.enable_download()?,
waiting_on_download: HashSet::new(),
unit_dependencies: HashMap::new(),
package_cache: HashMap::new(),
usr_resolve: resolve,
std_resolve,
is_std: false,
};
let std_unit_deps = calc_deps_of_std(&mut state, std_roots)?;
let libtest_unit_deps = calc_deps_of_libtest(&mut state, std_roots, roots)?;
deps_of_roots(roots, &mut state)?;
super::links::validate_links(state.resolve(), &state.unit_dependencies)?;
// Hopefully there aren't any links conflicts with the standard library?
if let Some(mut std_unit_deps) = std_unit_deps {
if let Some(libtest_unit_deps) = libtest_unit_deps {
attach_std_test(&mut state, libtest_unit_deps, &std_unit_deps);
}
fixup_proc_macro(&mut std_unit_deps);
attach_std_deps(&mut state, std_roots, std_unit_deps);
}
connect_run_custom_build_deps(&mut state.unit_dependencies);
// Dependencies are used in tons of places throughout the backend, many of
// which affect the determinism of the build itself. As a result, be sure
// that dependency lists are always sorted to ensure we've always got
// deterministic output.
for list in state.unit_dependencies.values_mut() {
list.sort();
}
trace!("ALL UNIT DEPENDENCIES {:#?}", state.unit_dependencies);
Ok(state.unit_dependencies)
}
/// Compute all the dependencies for the standard library.
fn calc_deps_of_std<'a, 'cfg>(
mut state: &mut State<'a, 'cfg>,
std_roots: &[Unit<'a>],
) -> CargoResult<Option<UnitGraph<'a>>> {
if std_roots.is_empty() {
return Ok(None);
}
// Compute dependencies for the standard library.
state.is_std = true;
deps_of_roots(std_roots, &mut state)?;
state.is_std = false;
Ok(Some(std::mem::replace(
&mut state.unit_dependencies,
HashMap::new(),
)))
}
/// Compute all the dependencies for libtest.
/// Returns None if libtest is not needed.
fn calc_deps_of_libtest<'a, 'cfg>(
mut state: &mut State<'a, 'cfg>,
std_roots: &[Unit<'a>],
roots: &[Unit<'a>],
) -> CargoResult<Option<UnitGraph<'a>>> {
// Conditionally include libtest.
if std_roots.is_empty()
|| !roots
.iter()
.any(|unit| unit.mode.is_rustc_test() && unit.target.harness())
{
return Ok(None);
}
state.is_std = true;
let test_id = state.resolve().query("test")?;
let test_pkg = state.get(test_id)?.expect("test doesn't need downloading");
let test_target = test_pkg
.targets()
.iter()
.find(|t| t.is_lib())
.expect("test has a lib");
let test_unit = new_unit(
state,
test_pkg,
test_target,
UnitFor::new_normal(),
Kind::Target,
CompileMode::Build,
);
let res = calc_deps_of_std(state, &[test_unit])?;
state.is_std = false;
Ok(res)
}
/// `proc_macro` has an implicit dependency on `std`, add it.
fn fixup_proc_macro<'a>(std_unit_deps: &mut UnitGraph<'a>) {
// Synthesize a dependency from proc_macro -> std.
//
// This is a gross hack. This wouldn't be necessary with `--sysroot`. See
// also libtest below.
if let Some(std) = std_unit_deps
.keys()
.find(|unit| unit.pkg.name().as_str() == "std" && unit.target.is_lib())
.cloned()
{
for (unit, deps) in std_unit_deps.iter_mut() {
if unit.pkg.name().as_str() == "proc_macro" {
deps.push(UnitDep {
unit: std,
unit_for: UnitFor::new_normal(),
extern_crate_name: InternedString::new("std"),
public: true,
});
}
}
}
}
/// Add libtest as a dependency of any test unit that needs it.
fn attach_std_test<'a, 'cfg>(
state: &mut State<'a, 'cfg>,
mut libtest_unit_deps: UnitGraph<'a>,
std_unit_deps: &UnitGraph<'a>,
) {
// Attach libtest to any test unit.
let (test_unit, test_deps) = libtest_unit_deps
.iter_mut()
.find(|(k, _v)| k.pkg.name().as_str() == "test" && k.target.is_lib())
.expect("test in deps");
for (unit, deps) in state.unit_dependencies.iter_mut() {
if unit.kind == Kind::Target && unit.mode.is_rustc_test() && unit.target.harness() {
// `public` here will need to be driven by toml declaration.
deps.push(UnitDep {
unit: *test_unit,
unit_for: UnitFor::new_normal(),
extern_crate_name: test_unit.pkg.name(),
public: false,
});
}
}
// Synthesize a dependency from libtest -> libc.
//
// This is a gross hack. In theory, libtest should explicitly list this,
// but presumably it would cause libc to be built again when it just uses
// the version from sysroot. This won't be necessary if Cargo uses
// `--sysroot`.
let libc_unit = std_unit_deps
.keys()
.find(|unit| unit.pkg.name().as_str() == "libc" && unit.target.is_lib())
.expect("libc in deps");
let libc_dep = UnitDep {
unit: *libc_unit,
unit_for: UnitFor::new_normal(),
extern_crate_name: InternedString::new(&libc_unit.target.crate_name()),
public: false,
};
test_deps.push(libc_dep);
// And also include the dependencies of libtest itself.
for (unit, deps) in libtest_unit_deps.into_iter() {
if let Some(other_unit) = state.unit_dependencies.insert(unit, deps) {
panic!(
"libtest unit collision with existing unit: {:?}",
other_unit
);
}
}
}
/// Add the standard library units to the `unit_dependencies`.
fn attach_std_deps<'a, 'cfg>(
state: &mut State<'a, 'cfg>,
std_roots: &[Unit<'a>],
std_unit_deps: UnitGraph<'a>,
) {
// Attach the standard library as a dependency of every target unit.
for (unit, deps) in state.unit_dependencies.iter_mut() {
if unit.kind == Kind::Target && !unit.mode.is_run_custom_build() {
deps.extend(std_roots.iter().map(|unit| UnitDep {
unit: *unit,
unit_for: UnitFor::new_normal(),
extern_crate_name: unit.pkg.name(),
// TODO: Does this `public` make sense?
public: true,
}));
}
}
// And also include the dependencies of the standard library itself.
for (unit, deps) in std_unit_deps.into_iter() {
if let Some(other_unit) = state.unit_dependencies.insert(unit, deps) {
panic!("std unit collision with existing unit: {:?}", other_unit);
}
}
}
/// Compute all the dependencies of the given root units.
/// The result is stored in state.unit_dependencies.
fn deps_of_roots<'a, 'cfg>(roots: &[Unit<'a>], mut state: &mut State<'a, 'cfg>) -> CargoResult<()> {
// Loop because we are downloading while building the dependency graph.
// The partially-built unit graph is discarded on each pass of the loop
// because it is incomplete: not all required Packages have been
// downloaded yet.
loop {
for unit in roots.iter() {
state.get(unit.pkg.package_id())?;
@ -77,22 +299,10 @@ pub fn build_unit_dependencies<'a, 'cfg>(
break;
}
}
connect_run_custom_build_deps(&mut state);
trace!("ALL UNIT DEPENDENCIES {:#?}", state.unit_dependencies);
// Dependencies are used in tons of places throughout the backend, many of
// which affect the determinism of the build itself. As a result be sure
// that dependency lists are always sorted to ensure we've always got a
// deterministic output.
for list in state.unit_dependencies.values_mut() {
list.sort();
}
Ok(state.unit_dependencies)
Ok(())
}
/// Compute the dependencies of a single unit.
fn deps_of<'a, 'cfg>(
unit: &Unit<'a>,
state: &mut State<'a, 'cfg>,
@ -106,10 +316,9 @@ fn deps_of<'a, 'cfg>(
// affect anything else in the hierarchy.
if !state.unit_dependencies.contains_key(unit) {
let unit_deps = compute_deps(unit, state, unit_for)?;
let to_insert: Vec<_> = unit_deps.iter().map(|&(unit, _)| unit).collect();
state.unit_dependencies.insert(*unit, to_insert);
for (unit, unit_for) in unit_deps {
deps_of(&unit, state, unit_for)?;
state.unit_dependencies.insert(*unit, unit_deps.clone());
for unit_dep in unit_deps {
deps_of(&unit_dep.unit, state, unit_dep.unit_for)?;
}
}
Ok(())
@ -123,9 +332,9 @@ fn compute_deps<'a, 'cfg>(
unit: &Unit<'a>,
state: &mut State<'a, 'cfg>,
unit_for: UnitFor,
) -> CargoResult<Vec<(Unit<'a>, UnitFor)>> {
) -> CargoResult<Vec<UnitDep<'a>>> {
if unit.mode.is_run_custom_build() {
return compute_deps_custom_build(unit, state.bcx);
return compute_deps_custom_build(unit, state);
} else if unit.mode.is_doc() {
// Note: this does not include doc test.
return compute_deps_doc(unit, state);
@ -133,7 +342,7 @@ fn compute_deps<'a, 'cfg>(
let bcx = state.bcx;
let id = unit.pkg.package_id();
let deps = bcx.resolve.deps(id).filter(|&(_id, deps)| {
let deps = state.resolve().deps(id).filter(|&(_id, deps)| {
assert!(!deps.is_empty());
deps.iter().any(|dep| {
// If this target is a build command, then we only want build
@ -182,13 +391,21 @@ fn compute_deps<'a, 'cfg>(
&& lib.proc_macro()
&& unit.kind == Kind::Target
{
let unit = new_unit(bcx, pkg, lib, dep_unit_for, Kind::Target, mode);
ret.push((unit, dep_unit_for));
let unit = new_unit(bcx, pkg, lib, dep_unit_for, Kind::Host, mode);
ret.push((unit, dep_unit_for));
let unit_dep = new_unit_dep(state, unit, pkg, lib, dep_unit_for, Kind::Target, mode)?;
ret.push(unit_dep);
let unit_dep = new_unit_dep(state, unit, pkg, lib, dep_unit_for, Kind::Host, mode)?;
ret.push(unit_dep);
} else {
let unit = new_unit(bcx, pkg, lib, dep_unit_for, unit.kind.for_target(lib), mode);
ret.push((unit, dep_unit_for));
let unit_dep = new_unit_dep(
state,
unit,
pkg,
lib,
dep_unit_for,
unit.kind.for_target(lib),
mode,
)?;
ret.push(unit_dep);
}
}
@ -198,7 +415,7 @@ fn compute_deps<'a, 'cfg>(
if unit.target.is_custom_build() {
return Ok(ret);
}
ret.extend(dep_build_script(unit, bcx));
ret.extend(dep_build_script(unit, state)?);
// If this target is a binary, test, example, etc, then it depends on
// the library of the same package. The call to `resolve.deps` above
@ -207,7 +424,7 @@ fn compute_deps<'a, 'cfg>(
if unit.target.is_lib() && unit.mode != CompileMode::Doctest {
return Ok(ret);
}
ret.extend(maybe_lib(unit, bcx, unit_for));
ret.extend(maybe_lib(unit, state, unit_for)?);
// If any integration tests/benches are being run, make sure that
// binaries are built as well.
@ -229,18 +446,17 @@ fn compute_deps<'a, 'cfg>(
})
})
.map(|t| {
(
new_unit(
bcx,
unit.pkg,
t,
UnitFor::new_normal(),
unit.kind.for_target(t),
CompileMode::Build,
),
new_unit_dep(
state,
unit,
unit.pkg,
t,
UnitFor::new_normal(),
unit.kind.for_target(t),
CompileMode::Build,
)
}),
})
.collect::<CargoResult<Vec<UnitDep<'a>>>>()?,
);
}
@ -253,15 +469,14 @@ fn compute_deps<'a, 'cfg>(
/// the returned set of units must all be run before `unit` is run.
fn compute_deps_custom_build<'a, 'cfg>(
unit: &Unit<'a>,
bcx: &BuildContext<'a, 'cfg>,
) -> CargoResult<Vec<(Unit<'a>, UnitFor)>> {
state: &mut State<'a, 'cfg>,
) -> CargoResult<Vec<UnitDep<'a>>> {
if let Some(links) = unit.pkg.manifest().links() {
if bcx.script_override(links, unit.kind).is_some() {
if state.bcx.script_override(links, unit.kind).is_some() {
// Overridden build scripts don't have any dependencies.
return Ok(Vec::new());
}
}
// When not overridden, the dependencies to run a build script are:
//
// 1. Compiling the build script itself.
@ -271,28 +486,29 @@ fn compute_deps_custom_build<'a, 'cfg>(
// We don't have a great way of handling (2) here right now so this is
// deferred until after the graph of all unit dependencies has been
// constructed.
let unit = new_unit(
bcx,
let unit_dep = new_unit_dep(
state,
unit,
unit.pkg,
unit.target,
// All dependencies of this unit should use profiles for custom
// builds.
UnitFor::new_build(),
// Build scripts always compiled for the host.
Kind::Host,
CompileMode::Build,
);
// All dependencies of this unit should use profiles for custom
// builds.
Ok(vec![(unit, UnitFor::new_build())])
)?;
Ok(vec![unit_dep])
}
/// Returns the dependencies necessary to document a package.
fn compute_deps_doc<'a, 'cfg>(
unit: &Unit<'a>,
state: &mut State<'a, 'cfg>,
) -> CargoResult<Vec<(Unit<'a>, UnitFor)>> {
) -> CargoResult<Vec<UnitDep<'a>>> {
let bcx = state.bcx;
let deps = bcx
.resolve
let deps = state
.resolve()
.deps(unit.pkg.package_id())
.filter(|&(_id, deps)| {
deps.iter().any(|dep| match dep.kind() {
@ -318,42 +534,63 @@ fn compute_deps_doc<'a, 'cfg>(
// However, for plugins/proc macros, deps should be built like normal.
let mode = check_or_build_mode(unit.mode, lib);
let dep_unit_for = UnitFor::new_normal().with_for_host(lib.for_host());
let lib_unit = new_unit(bcx, dep, lib, dep_unit_for, unit.kind.for_target(lib), mode);
ret.push((lib_unit, dep_unit_for));
let lib_unit_dep = new_unit_dep(
state,
unit,
dep,
lib,
dep_unit_for,
unit.kind.for_target(lib),
mode,
)?;
ret.push(lib_unit_dep);
if let CompileMode::Doc { deps: true } = unit.mode {
// Document this lib as well.
let doc_unit = new_unit(
bcx,
let doc_unit_dep = new_unit_dep(
state,
unit,
dep,
lib,
dep_unit_for,
unit.kind.for_target(lib),
unit.mode,
);
ret.push((doc_unit, dep_unit_for));
)?;
ret.push(doc_unit_dep);
}
}
// Be sure to build/run the build script for documented libraries.
ret.extend(dep_build_script(unit, bcx));
ret.extend(dep_build_script(unit, state)?);
// If we document a binary/example, we need the library available.
if unit.target.is_bin() || unit.target.is_example() {
ret.extend(maybe_lib(unit, bcx, UnitFor::new_normal()));
ret.extend(maybe_lib(unit, state, UnitFor::new_normal())?);
}
Ok(ret)
}
fn maybe_lib<'a>(
unit: &Unit<'a>,
bcx: &BuildContext<'a, '_>,
state: &mut State<'a, '_>,
unit_for: UnitFor,
) -> Option<(Unit<'a>, UnitFor)> {
unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
let mode = check_or_build_mode(unit.mode, t);
let unit = new_unit(bcx, unit.pkg, t, unit_for, unit.kind.for_target(t), mode);
(unit, unit_for)
})
) -> CargoResult<Option<UnitDep<'a>>> {
unit.pkg
.targets()
.iter()
.find(|t| t.linkable())
.map(|t| {
let mode = check_or_build_mode(unit.mode, t);
new_unit_dep(
state,
unit,
unit.pkg,
t,
unit_for,
unit.kind.for_target(t),
mode,
)
})
.transpose()
}
/// If a build script is scheduled to be run for the package specified by
@ -365,8 +602,8 @@ fn maybe_lib<'a>(
/// build script.
fn dep_build_script<'a>(
unit: &Unit<'a>,
bcx: &BuildContext<'a, '_>,
) -> Option<(Unit<'a>, UnitFor)> {
state: &State<'a, '_>,
) -> CargoResult<Option<UnitDep<'a>>> {
unit.pkg
.targets()
.iter()
@ -374,17 +611,22 @@ fn dep_build_script<'a>(
.map(|t| {
// The profile stored in the Unit is the profile for the thing
// the custom build script is running for.
let unit = bcx.units.intern(
let profile = state
.bcx
.profiles
.get_profile_run_custom_build(&unit.profile);
new_unit_dep_with_profile(
state,
unit,
unit.pkg,
t,
bcx.profiles.get_profile_run_custom_build(&unit.profile),
UnitFor::new_build(),
unit.kind,
CompileMode::RunCustomBuild,
bcx.resolve.features_sorted(unit.pkg.package_id()),
);
(unit, UnitFor::new_build())
profile,
)
})
.transpose()
}
/// Choose the correct mode for dependencies.
@ -406,23 +648,77 @@ fn check_or_build_mode(mode: CompileMode, target: &Target) -> CompileMode {
}
fn new_unit<'a>(
bcx: &BuildContext<'a, '_>,
state: &State<'a, '_>,
pkg: &'a Package,
target: &'a Target,
unit_for: UnitFor,
kind: Kind,
mode: CompileMode,
) -> Unit<'a> {
let profile = bcx.profiles.get_profile(
let profile = state.bcx.profiles.get_profile(
pkg.package_id(),
bcx.ws.is_member(pkg),
state.bcx.ws.is_member(pkg),
unit_for,
mode,
bcx.build_config.release,
state.bcx.build_config.release,
);
let features = bcx.resolve.features_sorted(pkg.package_id());
bcx.units.intern(pkg, target, profile, kind, mode, features)
let features = state.resolve().features_sorted(pkg.package_id());
state
.bcx
.units
.intern(pkg, target, profile, kind, mode, features)
}
fn new_unit_dep<'a>(
state: &State<'a, '_>,
parent: &Unit<'a>,
pkg: &'a Package,
target: &'a Target,
unit_for: UnitFor,
kind: Kind,
mode: CompileMode,
) -> CargoResult<UnitDep<'a>> {
let profile = state.bcx.profiles.get_profile(
pkg.package_id(),
state.bcx.ws.is_member(pkg),
unit_for,
mode,
state.bcx.build_config.release,
);
new_unit_dep_with_profile(state, parent, pkg, target, unit_for, kind, mode, profile)
}
fn new_unit_dep_with_profile<'a>(
state: &State<'a, '_>,
parent: &Unit<'a>,
pkg: &'a Package,
target: &'a Target,
unit_for: UnitFor,
kind: Kind,
mode: CompileMode,
profile: Profile,
) -> CargoResult<UnitDep<'a>> {
// TODO: consider making extern_crate_name return InternedString?
let extern_crate_name = InternedString::new(&state.resolve().extern_crate_name(
parent.pkg.package_id(),
pkg.package_id(),
target,
)?);
let public = state
.resolve()
.is_public_dep(parent.pkg.package_id(), pkg.package_id());
let features = state.resolve().features_sorted(pkg.package_id());
let unit = state
.bcx
.units
.intern(pkg, target, profile, kind, mode, features);
Ok(UnitDep {
unit,
unit_for,
extern_crate_name,
public,
})
}
/// Fill in missing dependencies for units of the `RunCustomBuild`
@ -435,7 +731,7 @@ fn new_unit<'a>(
///
/// Here we take the entire `deps` map and add more dependencies from execution
/// of one build script to execution of another build script.
fn connect_run_custom_build_deps(state: &mut State<'_, '_>) {
fn connect_run_custom_build_deps(unit_dependencies: &mut UnitGraph<'_>) {
let mut new_deps = Vec::new();
{
@ -444,12 +740,12 @@ fn connect_run_custom_build_deps(state: &mut State<'_, '_>) {
// example a library might depend on a build script, so this map will
// have the build script as the key and the library would be in the
// value's set.
let mut reverse_deps = HashMap::new();
for (unit, deps) in state.unit_dependencies.iter() {
let mut reverse_deps_map = HashMap::new();
for (unit, deps) in unit_dependencies.iter() {
for dep in deps {
if dep.mode == CompileMode::RunCustomBuild {
reverse_deps
.entry(dep)
if dep.unit.mode == CompileMode::RunCustomBuild {
reverse_deps_map
.entry(dep.unit)
.or_insert_with(HashSet::new)
.insert(unit);
}
@ -465,28 +761,37 @@ fn connect_run_custom_build_deps(state: &mut State<'_, '_>) {
// `links`, then we depend on that package's build script! Here we use
// `dep_build_script` to manufacture an appropriate build script unit to
// depend on.
for unit in state
.unit_dependencies
for unit in unit_dependencies
.keys()
.filter(|k| k.mode == CompileMode::RunCustomBuild)
{
let reverse_deps = match reverse_deps.get(unit) {
// These are the units (e.g. the package's lib) that this custom build runs for.
let reverse_deps = match reverse_deps_map.get(unit) {
Some(set) => set,
None => continue,
};
let to_add = reverse_deps
.iter()
.flat_map(|reverse_dep| state.unit_dependencies[reverse_dep].iter())
// Get all deps for lib.
.flat_map(|reverse_dep| unit_dependencies[reverse_dep].iter())
// Only deps with `links`.
.filter(|other| {
other.pkg != unit.pkg
&& other.target.linkable()
&& other.pkg.manifest().links().is_some()
other.unit.pkg != unit.pkg
&& other.unit.target.linkable()
&& other.unit.pkg.manifest().links().is_some()
})
// Get the RunCustomBuild for other lib.
.filter_map(|other| {
unit_dependencies[&other.unit]
.iter()
.find(|other_dep| other_dep.unit.mode == CompileMode::RunCustomBuild)
.cloned()
})
.filter_map(|other| dep_build_script(other, state.bcx).map(|p| p.0))
.collect::<HashSet<_>>();
if !to_add.is_empty() {
// (RunCustomBuild, set(other RunCustomBuild))
new_deps.push((*unit, to_add));
}
}
@ -494,15 +799,19 @@ fn connect_run_custom_build_deps(state: &mut State<'_, '_>) {
// And finally, add in all the missing dependencies!
for (unit, new_deps) in new_deps {
state
.unit_dependencies
.get_mut(&unit)
.unwrap()
.extend(new_deps);
unit_dependencies.get_mut(&unit).unwrap().extend(new_deps);
}
}
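The motivating scenario for this pass: a build script consuming metadata emitted by a `links` dependency's build script. A hypothetical `build.rs` for a crate depending on `libz-sys` (which sets `links = "z"`):
// Values printed as `cargo:KEY=VALUE` by libz-sys's build script surface
// here as DEP_Z_<KEY> environment variables; they exist only if that
// script has already run, which is exactly the ordering edge synthesized
// above.
fn main() {
    if let Ok(include) = std::env::var("DEP_Z_INCLUDE") {
        println!("cargo:rustc-env=ZLIB_INCLUDE_DIR={}", include);
    }
}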
impl<'a, 'cfg> State<'a, 'cfg> {
fn resolve(&self) -> &'a Resolve {
if self.is_std {
self.std_resolve.unwrap()
} else {
self.usr_resolve
}
}
fn get(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
if let Some(pkg) = self.package_cache.get(&id) {
return Ok(Some(pkg));

View File

@ -334,6 +334,7 @@ pub struct CliUnstable {
pub install_upgrade: bool,
pub cache_messages: bool,
pub binary_dep_depinfo: bool,
pub build_std: Option<Vec<String>>,
}
impl CliUnstable {
@ -380,6 +381,9 @@ impl CliUnstable {
"install-upgrade" => self.install_upgrade = true,
"cache-messages" => self.cache_messages = true,
"binary-dep-depinfo" => self.binary_dep_depinfo = true,
"build-std" => {
self.build_std = Some(crate::core::compiler::standard_lib::parse_unstable_flag(v))
}
_ => failure::bail!("unknown `-Z` flag specified: {}", k),
}

View File

@ -452,6 +452,18 @@ impl<'cfg> PackageSet<'cfg> {
pub fn sources_mut(&self) -> RefMut<'_, SourceMap<'cfg>> {
self.sources.borrow_mut()
}
/// Merge the given set into self, keeping entries already present in `self`.
pub fn add_set(&mut self, set: PackageSet<'cfg>) {
assert!(!self.downloading.get());
assert!(!set.downloading.get());
for (pkg_id, p_cell) in set.packages {
self.packages.entry(pkg_id).or_insert(p_cell);
}
let mut sources = self.sources.borrow_mut();
let other_sources = set.sources.into_inner();
sources.add_source_map(other_sources);
}
}
// When dynamically linked against libcurl, we want to ignore some failures

View File

@ -576,7 +576,7 @@ impl fmt::Display for PanicStrategy {
/// Flags used in creating `Unit`s to indicate the purpose for the target, and
/// to ensure the target's dependencies have the correct settings.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub struct UnitFor {
/// A target for `build.rs` or any of its dependencies, or a proc-macro or
/// any of its dependencies. This enables `build-override` profiles for

View File

@ -318,4 +318,11 @@ impl<'src> SourceMap<'src> {
) -> impl Iterator<Item = (&'a SourceId, &'a mut (dyn Source + 'src))> {
self.map.iter_mut().map(|(a, b)| (a, &mut **b))
}
/// Merge the given map into self, keeping entries already present in `self`.
pub fn add_source_map(&mut self, other: SourceMap<'src>) {
for (key, value) in other.map {
self.map.entry(key).or_insert(value);
}
}
}

View File

@ -138,17 +138,24 @@ impl<'cfg> Workspace<'cfg> {
/// root and all member packages. It will then validate the workspace
/// before returning it, so `Ok` is only returned for valid workspaces.
pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult<Workspace<'cfg>> {
let target_dir = config.target_dir()?;
let mut ws = Workspace::new_default(manifest_path.to_path_buf(), config);
ws.target_dir = config.target_dir()?;
ws.root_manifest = ws.find_root(manifest_path)?;
ws.find_members()?;
ws.validate()?;
Ok(ws)
}
let mut ws = Workspace {
fn new_default(current_manifest: PathBuf, config: &'cfg Config) -> Workspace<'cfg> {
Workspace {
config,
current_manifest: manifest_path.to_path_buf(),
current_manifest,
packages: Packages {
config,
packages: HashMap::new(),
},
root_manifest: None,
target_dir,
target_dir: None,
members: Vec::new(),
member_ids: HashSet::new(),
default_members: Vec::new(),
@ -156,10 +163,24 @@ impl<'cfg> Workspace<'cfg> {
require_optional_deps: true,
loaded_packages: RefCell::new(HashMap::new()),
ignore_lock: false,
};
ws.root_manifest = ws.find_root(manifest_path)?;
}
}
pub fn new_virtual(
root_path: PathBuf,
current_manifest: PathBuf,
manifest: VirtualManifest,
config: &'cfg Config,
) -> CargoResult<Workspace<'cfg>> {
let mut ws = Workspace::new_default(current_manifest, config);
ws.root_manifest = Some(root_path.join("Cargo.toml"));
ws.target_dir = config.target_dir()?;
ws.packages
.packages
.insert(root_path, MaybePackage::Virtual(manifest));
ws.find_members()?;
ws.validate()?;
// TODO: validation does not work because it walks up the directory
// tree looking for the root, which is a fake file that doesn't exist.
Ok(ws)
}
@ -178,37 +199,21 @@ impl<'cfg> Workspace<'cfg> {
target_dir: Option<Filesystem>,
require_optional_deps: bool,
) -> CargoResult<Workspace<'cfg>> {
let mut ws = Workspace {
config,
current_manifest: package.manifest_path().to_path_buf(),
packages: Packages {
config,
packages: HashMap::new(),
},
root_manifest: None,
target_dir: None,
members: Vec::new(),
member_ids: HashSet::new(),
default_members: Vec::new(),
is_ephemeral: true,
require_optional_deps,
loaded_packages: RefCell::new(HashMap::new()),
ignore_lock: false,
let mut ws = Workspace::new_default(package.manifest_path().to_path_buf(), config);
ws.is_ephemeral = true;
ws.require_optional_deps = require_optional_deps;
let key = ws.current_manifest.parent().unwrap();
let id = package.package_id();
let package = MaybePackage::Package(package);
ws.packages.packages.insert(key.to_path_buf(), package);
ws.target_dir = if let Some(dir) = target_dir {
Some(dir)
} else {
ws.config.target_dir()?
};
{
let key = ws.current_manifest.parent().unwrap();
let id = package.package_id();
let package = MaybePackage::Package(package);
ws.packages.packages.insert(key.to_path_buf(), package);
ws.target_dir = if let Some(dir) = target_dir {
Some(dir)
} else {
ws.config.target_dir()?
};
ws.members.push(ws.current_manifest.clone());
ws.member_ids.insert(id);
ws.default_members.push(ws.current_manifest.clone());
}
ws.members.push(ws.current_manifest.clone());
ws.member_ids.insert(id);
ws.default_members.push(ws.current_manifest.clone());
Ok(ws)
}

View File

@ -2,7 +2,7 @@ use std::collections::HashMap;
use std::fs;
use std::path::Path;
use crate::core::compiler::build_unit_dependencies;
use crate::core::compiler::unit_dependencies;
use crate::core::compiler::UnitInterner;
use crate::core::compiler::{BuildConfig, BuildContext, CompileMode, Context, Kind};
use crate::core::profiles::UnitFor;
@ -57,7 +57,6 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
build_config.release = opts.release;
let bcx = BuildContext::new(
ws,
&resolve,
&packages,
opts.config,
&build_config,
@ -105,7 +104,8 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
}
}
let unit_dependencies = build_unit_dependencies(&bcx, &units)?;
let unit_dependencies =
unit_dependencies::build_unit_dependencies(&bcx, &resolve, None, &units, &[])?;
let mut cx = Context::new(config, &bcx, unit_dependencies)?;
cx.prepare_units(None, &units)?;

View File

@ -7,7 +7,8 @@
//! rough outline is:
//!
//! - Resolve the dependency graph (see `ops::resolve`).
//! - Download any packages needed (see `PackageSet`).
//! - Download any packages needed (see `PackageSet`). Note that dependency
//! downloads are deferred until `build_unit_dependencies`.
//! - Generate a list of top-level "units" of work for the targets the user
//! requested on the command-line. Each `Unit` corresponds to a compiler
//! invocation. This is done in this module (`generate_targets`).
@ -27,7 +28,8 @@ use std::iter::FromIterator;
use std::path::PathBuf;
use std::sync::Arc;
use crate::core::compiler::build_unit_dependencies;
use crate::core::compiler::standard_lib;
use crate::core::compiler::unit_dependencies::build_unit_dependencies;
use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
use crate::core::compiler::{CompileMode, Kind, Unit};
use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
@ -298,16 +300,38 @@ pub fn compile_ws<'a>(
Kind::Host
};
let profiles = ws.profiles();
let specs = spec.to_package_id_specs(ws)?;
let dev_deps = ws.require_optional_deps() || filter.need_dev_deps(build_config.mode);
let opts = ResolveOpts::new(dev_deps, features, all_features, !no_default_features);
let resolve = ops::resolve_ws_with_opts(ws, opts, &specs)?;
let (packages, resolve_with_overrides) = resolve;
let (mut packages, resolve_with_overrides) = resolve;
let std_resolve = if let Some(crates) = &config.cli_unstable().build_std {
if build_config.requested_target.is_none() {
// TODO: This should eventually be fixed. Unfortunately it is not
// easy to get the host triple in BuildConfig. Consider changing
// requested_target to an enum, or some other approach.
failure::bail!("-Zbuild-std requires --target");
}
let (std_package_set, std_resolve) = standard_lib::resolve_std(ws, crates)?;
packages.add_set(std_package_set);
Some(std_resolve)
} else {
None
};
// Find the packages in the resolver that the user wants to build (those
// passed in with `-p` or the defaults from the workspace), and convert
// Vec<PackageIdSpec> to a Vec<&PackageId>.
let to_build_ids = specs
.iter()
.map(|s| s.query(resolve_with_overrides.iter()))
.collect::<CargoResult<Vec<_>>>()?;
// Now get the `Package` for each `PackageId`. This may trigger a download
// if the user specified `-p` for a dependency that is not downloaded.
// Dependencies will be downloaded during build_unit_dependencies.
let mut to_builds = packages.get_many(to_build_ids)?;
// The ordering here affects some error messages coming out of cargo, so
@ -316,7 +340,7 @@ pub fn compile_ws<'a>(
to_builds.sort_by_key(|p| p.package_id());
for pkg in to_builds.iter() {
pkg.manifest().print_teapot(ws.config());
pkg.manifest().print_teapot(config);
if build_config.mode.is_any_test()
&& !ws.is_member(pkg)
@ -343,13 +367,11 @@ pub fn compile_ws<'a>(
);
}
let profiles = ws.profiles();
profiles.validate_packages(&mut config.shell(), &packages)?;
let interner = UnitInterner::new();
let mut bcx = BuildContext::new(
ws,
&resolve_with_overrides,
&packages,
config,
build_config,
@ -367,6 +389,12 @@ pub fn compile_ws<'a>(
&bcx,
)?;
let std_roots = if let Some(crates) = &config.cli_unstable().build_std {
standard_lib::generate_std_roots(&bcx, crates, std_resolve.as_ref().unwrap())?
} else {
Vec::new()
};
if let Some(args) = extra_args {
if units.len() != 1 {
failure::bail!(
@ -386,7 +414,13 @@ pub fn compile_ws<'a>(
}
}
let unit_dependencies = build_unit_dependencies(&bcx, &units)?;
let unit_dependencies = build_unit_dependencies(
&bcx,
&resolve_with_overrides,
std_resolve.as_ref(),
&units,
&std_roots,
)?;
let ret = {
let _p = profile::start("compiling");
@ -583,7 +617,7 @@ fn generate_targets<'a>(
packages: &[&'a Package],
filter: &CompileFilter,
default_arch_kind: Kind,
resolve: &Resolve,
resolve: &'a Resolve,
bcx: &BuildContext<'a, '_>,
) -> CargoResult<Vec<Unit<'a>>> {
// Helper for creating a `Unit` struct.
@ -655,7 +689,7 @@ fn generate_targets<'a>(
target_mode,
bcx.build_config.release,
);
let features = bcx.resolve.features_sorted(pkg.package_id());
let features = resolve.features_sorted(pkg.package_id());
bcx.units
.intern(pkg, target, profile, kind, target_mode, features)
};

View File

@ -185,7 +185,7 @@ fn run_doc_tests(
}
for &(ref extern_crate_name, ref lib) in deps.iter() {
let mut arg = OsString::from(extern_crate_name);
let mut arg = OsString::from(extern_crate_name.as_str());
arg.push("=");
arg.push(lib);
p.arg("--extern").arg(&arg);

View File

@ -895,6 +895,17 @@ impl TomlManifest {
));
}
if let Some(links) = &project.links {
if !targets.iter().any(|t| t.is_custom_build()) {
bail!(
"package `{}` specifies that it links to `{}` but does not \
have a custom build script",
pkgid,
links
)
}
}
let mut deps = Vec::new();
let replace;
let patch;

View File

@ -333,7 +333,10 @@ fn links_no_build_cmd() {
.with_status(101)
.with_stderr(
"\
[ERROR] package `foo v0.5.0 ([CWD])` specifies that it links to `a` but does \
[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
Caused by:
package `foo v0.5.0 ([CWD])` specifies that it links to `a` but does \
not have a custom build script
",
)
@ -388,6 +391,61 @@ failed to select a version for `a-sys` which could resolve this conflict
").run();
}
#[cargo_test]
fn links_duplicates_old_registry() {
// Test old links validator. See `validate_links`.
Package::new("bar", "0.1.0")
.file(
"Cargo.toml",
r#"
[package]
name = "bar"
version = "0.1.0"
links = "a"
"#,
)
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "")
.publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
links = "a"
[dependencies]
bar = "0.1"
"#,
)
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "")
.build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 ([..])
[ERROR] multiple packages link to native library `a`, \
but a native library can be linked only once
package `bar v0.1.0`
... which is depended on by `foo v0.1.0 ([..]foo)`
links to native library `a`
package `foo v0.1.0 ([..]foo)`
also links to native library `a`
",
)
.run();
}
#[cargo_test]
fn links_duplicates_deep_dependency() {
// this tests that the links_duplicates are caught at resolver time

View File

@ -91,6 +91,7 @@ mod rustflags;
mod search;
mod shell_quoting;
mod small_fd_limits;
mod standard_lib;
mod test;
mod tool_paths;
mod update;

View File

@ -0,0 +1,326 @@
use crate::support::{is_nightly, paths, project, rustc_host, Execs, Project};
fn cargo_build_std(project: &Project, cmd: &str, crates: &str) -> Execs {
let unstable = if crates.is_empty() {
"-Zbuild-std".to_string()
} else {
format!("-Zbuild-std={}", crates)
};
let target = paths::root().join("target");
let mut execs = project.cargo(cmd);
if !cmd.contains("--target") {
execs.arg("--target").arg(rustc_host());
}
execs
.arg(unstable)
.arg("-Zno-index-update")
.env_remove("CARGO_HOME")
.env_remove("HOME")
.env("CARGO_TARGET_DIR", target.as_os_str())
.masquerade_as_nightly_cargo();
execs
}
#[cargo_test]
fn std_lib() {
if !is_nightly() {
// -Zbuild-std is nightly
// -Zno-index-update is nightly
// We don't want these tests to run on rust-lang/rust.
return;
}
simple_lib_std();
simple_bin_std();
lib_nostd();
bin_nostd();
check_core();
cross_custom();
hashbrown();
libc();
test();
target_proc_macro();
bench();
doc();
check_std();
doctest();
}
fn simple_lib_std() {
let p = project().file("src/lib.rs", "").build();
cargo_build_std(&p, "build -v", "")
.with_stderr_contains("[RUNNING] `rustc [..]--crate-name std [..]")
.run();
// Check freshness.
p.change_file("src/lib.rs", " ");
cargo_build_std(&p, "build -v", "std")
.with_stderr_contains("[FRESH] std[..]")
.run();
}
fn simple_bin_std() {
let p = project().file("src/main.rs", "fn main() {}").build();
cargo_build_std(&p, "run -v", "std").run();
}
fn lib_nostd() {
let p = project()
.file(
"src/lib.rs",
r#"
#![no_std]
pub fn foo() {
assert_eq!(core::u8::MIN, 0);
}
"#,
)
.build();
cargo_build_std(&p, "build -v --lib", "core")
.with_stderr_does_not_contain("[..]libstd[..]")
.run();
}
fn bin_nostd() {
if cfg!(windows) {
// I think windows requires setting up mainCRTStartup,
// I'm not in the mood to figure it out.
return;
}
let p = project()
.file("src/lib.rs", "#![no_std] pub fn foo() {}")
.file(
"src/main.rs",
r#"
#![no_std]
#![feature(lang_items, start, core_intrinsics)]
use core::panic::PanicInfo;
#[panic_handler]
fn panic(_info: &PanicInfo) -> ! {
unsafe { core::intrinsics::abort() }
}
#[start]
fn start(_argc: isize, _argv: *const *const u8) -> isize {
foo::foo();
123
}
#[lang = "eh_personality"]
extern fn eh_personality() {}
"#,
)
.file(
"build.rs",
r#"
fn main() {
let target = std::env::var("TARGET").expect("TARGET was not set");
if target.contains("apple-darwin") {
println!("cargo:rustc-link-lib=System");
} else if target.contains("linux") {
// TODO: why is this needed?
println!("cargo:rustc-link-lib=c");
}
}
"#,
)
.build();
cargo_build_std(&p, "run -v", "core")
.with_status(123)
.with_stderr_contains("[RUNNING] [..]foo[EXE]`")
.run();
}
fn check_core() {
let p = project()
.file("src/lib.rs", "#![no_std] fn unused_fn() {}")
.build();
cargo_build_std(&p, "check -v", "core")
.with_stderr_contains("[WARNING] [..]unused_fn[..]`")
.run();
}
fn cross_custom() {
let p = project()
.file("src/lib.rs", "#![no_std] pub fn f() {}")
.file(
"custom-target.json",
r#"
{
"llvm-target": "x86_64-unknown-none-gnu",
"data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128",
"arch": "x86_64",
"target-endian": "little",
"target-pointer-width": "64",
"target-c-int-width": "32",
"os": "none",
"linker-flavor": "ld.lld"
}
"#,
)
.build();
cargo_build_std(&p, "build --target custom-target.json -v", "core").run();
}
fn hashbrown() {
let p = project()
.file(
"src/lib.rs",
r#"
pub fn f() -> hashbrown::HashMap<i32, i32> {
hashbrown::HashMap::new()
}
"#,
)
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
edition = "2018"
[dependencies]
hashbrown = "=0.4.0"
"#,
)
.build();
cargo_build_std(&p, "build -v", "std").run();
}
fn libc() {
let p = project()
.file(
"src/lib.rs",
r#"
pub fn f() -> ! {
unsafe { libc::exit(123); }
}
"#,
)
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
edition = "2018"
[dependencies]
libc = "=0.2.54"
"#,
)
.build();
cargo_build_std(&p, "build -v", "std").run();
}
fn test() {
let p = project()
.file(
"src/lib.rs",
r#"
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
assert_eq!(2 + 2, 4);
}
}
"#,
)
.build();
cargo_build_std(&p, "test -v", "std")
.with_stdout_contains("test tests::it_works ... ok")
.run();
}
fn target_proc_macro() {
let p = project()
.file(
"src/lib.rs",
r#"
extern crate proc_macro;
pub fn f() {
let _ts = proc_macro::TokenStream::new();
}
"#,
)
.build();
cargo_build_std(&p, "build -v", "std,proc_macro").run();
}
fn bench() {
let p = project()
.file(
"src/lib.rs",
r#"
#![feature(test)]
extern crate test;
#[bench]
fn b1(b: &mut test::Bencher) {
b.iter(|| ())
}
"#,
)
.build();
cargo_build_std(&p, "bench -v", "std").run();
}
fn doc() {
let p = project()
.file(
"src/lib.rs",
r#"
/// Doc
pub fn f() -> Result<(), ()> {Ok(())}
"#,
)
.build();
cargo_build_std(&p, "doc -v", "std").run();
}
fn check_std() {
let p = project()
.file("src/lib.rs", "pub fn f() {}")
.file("src/main.rs", "fn main() {}")
.file(
"tests/t1.rs",
r#"
#[test]
fn t1() {
assert_eq!(1, 2);
}
"#,
)
.build();
cargo_build_std(&p, "check -v --all-targets", "std").run();
cargo_build_std(&p, "check -v --all-targets --profile=test", "std").run();
}
fn doctest() {
let p = project()
.file(
"src/lib.rs",
r#"
/// Doc
/// ```
/// assert_eq!(1, 1);
/// ```
pub fn f() {}
"#,
)
.build();
cargo_build_std(&p, "test --doc -v", "std").run();
}