Auto merge of #12565 - weihanglo:rust-1.72.0-backport, r=ehuss

[stable-1.72.0] add missing `windows-sys` features back

Stable backports:

- <https://github.com/rust-lang/cargo/pull/12563>

In order to make CI pass, the following PRs are also cherry-picked:

- b4a26b0f4c from #12475
- c508cb683e from #12538
- 43c253e69a from #12351
- 689defd4ba from #12500

---

Fixes <https://github.com/rust-lang/cargo/issues/12562>

This won't affect Rust releases, i.e., no Rust 1.72.1 will happen. We are doing this only for the release of the `cargo` crate.
bors 2023-08-26 15:25:22 +00:00
commit 26bba48309
12 changed files with 59 additions and 33 deletions
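
For context on the fix itself: each `Win32_*` Cargo feature of `windows-sys` gates the corresponding API module, so code that names a path under a feature that is not enabled fails to compile even though the dependency itself resolves; restoring the dropped features is what lets the published `cargo` crate build again. The snippet below is only a minimal sketch of that mechanism, not code from this commit; the helper function and the single `Win32_System_Threading` feature are assumptions for illustration.

```rust
// Illustrative sketch only (not part of this commit). Assumes a manifest entry like:
//     windows-sys = { version = "0.48", features = ["Win32_System_Threading"] }
// Dropping that feature removes the `Win32::System::Threading` module entirely,
// so the call below stops compiling on Windows, which is the same class of
// breakage the restored feature list in this backport addresses.

#[cfg(windows)]
fn current_process_id() -> u32 {
    // Gated behind the `Win32_System_Threading` feature of `windows-sys`.
    unsafe { windows_sys::Win32::System::Threading::GetCurrentProcessId() }
}

#[cfg(not(windows))]
fn current_process_id() -> u32 {
    // Portable fallback so the sketch also builds off Windows.
    std::process::id()
}

fn main() {
    println!("current pid: {}", current_process_id());
}
```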

Cargo.lock (generated)

@@ -262,7 +262,7 @@ dependencies = [
 [[package]]
 name = "cargo"
-version = "0.73.0"
+version = "0.73.1"
 dependencies = [
  "anyhow",
  "base64",

Cargo.toml

@@ -95,7 +95,7 @@ windows-sys = "0.48"
 [package]
 name = "cargo"
-version = "0.73.0"
+version = "0.73.1"
 edition = "2021"
 license = "MIT OR Apache-2.0"
 homepage = "https://crates.io"
@@ -181,10 +181,12 @@ fwdansi.workspace = true
 workspace = true
 features = [
   "Win32_Foundation",
+  "Win32_Security",
   "Win32_Storage_FileSystem",
+  "Win32_System_IO",
   "Win32_System_Console",
+  "Win32_System_Threading",
   "Win32_System_JobObjects",
-  "Win32_System_Threading",
 ]

 [dev-dependencies]


@@ -34,7 +34,7 @@ fn main() {
     for (label, value) in autolabel.iter() {
         let Some(trigger_files) = value.get("trigger_files") else {
-            continue
+            continue;
         };
         let trigger_files = trigger_files.as_array().expect("an array");
         let missing_files: Vec<_> = trigger_files


@@ -5,7 +5,7 @@
 //! rough outline is:
 //!
 //! 1. Resolve the dependency graph (see [`ops::resolve`]).
-//! 2. Download any packages needed (see [`PackageSet`](crate::core::PackageSet)).
+//! 2. Download any packages needed (see [`PackageSet`]).
 //! 3. Generate a list of top-level "units" of work for the targets the user
 //!    requested on the command-line. Each [`Unit`] corresponds to a compiler
 //!    invocation. This is done in this module ([`UnitGenerator::generate_root_units`]).


@@ -141,7 +141,7 @@ pub fn certificate_check(
     let Some(host_key) = cert.as_hostkey() else {
         // Return passthrough for TLS X509 certificates to use whatever validation
         // was done in git2.
-        return Ok(CertificateCheckStatus::CertificatePassthrough)
+        return Ok(CertificateCheckStatus::CertificatePassthrough);
     };
     // If a nonstandard port is in use, check for that first.
     // The fallback to check without a port is handled in the HostKeyNotFound handler.
@@ -611,10 +611,18 @@ impl KnownHost {
 }

 fn hashed_hostname_matches(host: &str, hashed: &str) -> bool {
-    let Some((b64_salt, b64_host)) = hashed.split_once('|') else { return false; };
-    let Ok(salt) = STANDARD.decode(b64_salt) else { return false; };
-    let Ok(hashed_host) = STANDARD.decode(b64_host) else { return false; };
-    let Ok(mut mac) = hmac::Hmac::<sha1::Sha1>::new_from_slice(&salt) else { return false; };
+    let Some((b64_salt, b64_host)) = hashed.split_once('|') else {
+        return false;
+    };
+    let Ok(salt) = STANDARD.decode(b64_salt) else {
+        return false;
+    };
+    let Ok(hashed_host) = STANDARD.decode(b64_host) else {
+        return false;
+    };
+    let Ok(mut mac) = hmac::Hmac::<sha1::Sha1>::new_from_slice(&salt) else {
+        return false;
+    };
     mac.update(host.as_bytes());
     let result = mac.finalize().into_bytes();
     hashed_host == &result[..]


@@ -808,7 +808,9 @@ impl<'cfg> Downloads<'cfg> {
     /// Updates the state of the progress bar for downloads.
     fn tick(&self) -> CargoResult<()> {
         let mut progress = self.progress.borrow_mut();
-        let Some(progress) = progress.as_mut() else { return Ok(()); };
+        let Some(progress) = progress.as_mut() else {
+            return Ok(());
+        };

         // Since the sparse protocol discovers dependencies as it goes,
         // it's not possible to get an accurate progress indication.


@@ -887,7 +887,7 @@ impl<'cfg> Source for RegistrySource<'cfg> {
 impl RegistryConfig {
     /// File name of [`RegistryConfig`].
-    const NAME: &str = "config.json";
+    const NAME: &'static str = "config.json";
 }

 /// Get the maximum upack size that Cargo permits


@@ -73,7 +73,8 @@ pub fn public_token_from_credential(
     source_id: &SourceId,
     mutation: Option<&'_ Mutation<'_>>,
 ) -> CargoResult<Secret<String>> {
-    let RegistryCredentialConfig::AsymmetricKey((secret_key, secret_key_subject)) = credential else {
+    let RegistryCredentialConfig::AsymmetricKey((secret_key, secret_key_subject)) = credential
+    else {
         anyhow::bail!("credential must be an asymmetric secret key")
     };


@@ -207,7 +207,11 @@ impl DocFragment {
         let syn::Meta::NameValue(nv) = &attr.meta else {
             anyhow::bail!("unsupported attr meta for {:?}", attr.meta.path())
         };
-        let syn::Expr::Lit(syn::ExprLit { lit: syn::Lit::Str(lit), .. }) = &nv.value else {
+        let syn::Expr::Lit(syn::ExprLit {
+            lit: syn::Lit::Str(lit),
+            ..
+        }) = &nv.value
+        else {
             anyhow::bail!("only string literals are supported")
         };
         Ok(Self {
@@ -373,13 +377,18 @@ fn unindent_doc_fragments(docs: &mut [DocFragment]) {
     let Some(min_indent) = docs
         .iter()
         .map(|fragment| {
-            fragment.doc.as_str().lines().fold(usize::MAX, |min_indent, line| {
-                if line.chars().all(|c| c.is_whitespace()) {
-                    min_indent
-                } else {
-                    // Compare against either space or tab, ignoring whether they are
-                    // mixed or not.
-                    let whitespace = line.chars().take_while(|c| *c == ' ' || *c == '\t').count();
-                    min_indent.min(whitespace)
-                }
-            })
+            fragment
+                .doc
+                .as_str()
+                .lines()
+                .fold(usize::MAX, |min_indent, line| {
+                    if line.chars().all(|c| c.is_whitespace()) {
+                        min_indent
+                    } else {
+                        // Compare against either space or tab, ignoring whether they are
+                        // mixed or not.
+                        let whitespace =
+                            line.chars().take_while(|c| *c == ' ' || *c == '\t').count();
+                        min_indent.min(whitespace)
+                    }
+                })


@@ -2835,7 +2835,9 @@ fn parse_unstable_lints<T: Deserialize<'static>>(
     config: &Config,
     warnings: &mut Vec<String>,
 ) -> CargoResult<Option<T>> {
-    let Some(lints) = lints else { return Ok(None); };
+    let Some(lints) = lints else {
+        return Ok(None);
+    };

     if !config.cli_unstable().lints {
         warn_for_lint_feature(config, warnings);
@@ -2878,7 +2880,9 @@ switch to nightly channel you can pass
 }

 fn verify_lints(lints: Option<TomlLints>) -> CargoResult<Option<TomlLints>> {
-    let Some(lints) = lints else { return Ok(None); };
+    let Some(lints) = lints else {
+        return Ok(None);
+    };

     for (tool, lints) in &lints {
         let supported = ["rust", "clippy", "rustdoc"];


@@ -314,9 +314,9 @@ fn cargo_bench_failing_test() {
 [RUNNING] [..] (target/release/deps/foo-[..][EXE])",
         )
         .with_stdout_contains("[..]thread '[..]' panicked at[..]")
-        .with_stdout_contains("[..]assertion failed[..]")
-        .with_stdout_contains("[..]left: `\"hello\"`[..]")
-        .with_stdout_contains("[..]right: `\"nope\"`[..]")
+        .with_stdout_contains("[..]assertion [..]failed[..]")
+        .with_stdout_contains("[..]left: [..]\"hello\"[..]")
+        .with_stdout_contains("[..]right: [..]\"nope\"[..]")
         .with_stdout_contains("[..]src/main.rs:15[..]")
         .with_status(101)
         .run();


@@ -389,10 +389,10 @@ failures:
 ---- test_hello stdout ----
 [..]thread '[..]' panicked at [..]",
         )
-        .with_stdout_contains("[..]assertion failed[..]")
-        .with_stdout_contains("[..]`(left == right)`[..]")
-        .with_stdout_contains("[..]left: `\"hello\"`,[..]")
-        .with_stdout_contains("[..]right: `\"nope\"`[..]")
+        .with_stdout_contains("[..]assertion [..]failed[..]")
+        .with_stdout_contains("[..]left == right[..]")
+        .with_stdout_contains("[..]left: [..]\"hello\"[..]")
+        .with_stdout_contains("[..]right: [..]\"nope\"[..]")
         .with_stdout_contains("[..]src/main.rs:12[..]")
         .with_stdout_contains(
             "\