a large number of clippy suggestions

Eh2406 2018-07-20 15:39:05 -04:00
parent 207c2cb14d
commit 385b54b38f
36 changed files with 176 additions and 165 deletions

View File

@ -134,8 +134,8 @@ pub enum CompileMode {
impl CompileMode {
/// Returns true if the unit is being checked.
pub fn is_check(&self) -> bool {
match *self {
pub fn is_check(self) -> bool {
match self {
CompileMode::Check { .. } => true,
_ => false,
}
@ -144,8 +144,8 @@ impl CompileMode {
/// Returns true if this is a doc or doctest. Be careful using this.
/// Although both run rustdoc, the dependencies for those two modes are
/// very different.
pub fn is_doc(&self) -> bool {
match *self {
pub fn is_doc(self) -> bool {
match self {
CompileMode::Doc { .. } | CompileMode::Doctest => true,
_ => false,
}
@ -153,8 +153,8 @@ impl CompileMode {
/// Returns true if this is any type of test (test, benchmark, doctest, or
/// check-test).
pub fn is_any_test(&self) -> bool {
match *self {
pub fn is_any_test(self) -> bool {
match self {
CompileMode::Test
| CompileMode::Bench
| CompileMode::Check { test: true }
@ -164,8 +164,8 @@ impl CompileMode {
}
/// Returns true if this is the *execution* of a `build.rs` script.
pub fn is_run_custom_build(&self) -> bool {
*self == CompileMode::RunCustomBuild
pub fn is_run_custom_build(self) -> bool {
self == CompileMode::RunCustomBuild
}
/// List of all modes (currently used by `cargo clean -p` for computing
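Context for this hunk: switching `is_check`, `is_doc`, `is_any_test`, and `is_run_custom_build` from `&self` to `self` is what clippy's `trivially_copy_pass_by_ref` lint asks for when a type is small and `Copy`; taking it by value also drops the `match *self` dereference. A minimal sketch of the same shape, using a made-up `Mode` enum rather than cargo's `CompileMode`:

#[derive(Copy, Clone)]
enum Mode {
    Check { test: bool },
    Build,
}

impl Mode {
    // Before: `fn is_check(&self) -> bool { match *self { .. } }`.
    // A Copy enum this small is cheaper to pass by value, and the
    // match no longer needs to dereference.
    fn is_check(self) -> bool {
        match self {
            Mode::Check { .. } => true,
            _ => false,
        }
    }
}

fn main() {
    assert!(Mode::Check { test: false }.is_check());
    assert!(!Mode::Build.is_check());
}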

View File

@ -172,7 +172,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
.requested_target
.as_ref()
.map(|s| s.as_str())
.unwrap_or(self.host_triple())
.unwrap_or_else(|| self.host_triple())
}
/// Get the target configuration for a particular host or target
@ -193,7 +193,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
self.config,
&self.build_config.requested_target,
self.host_triple(),
self.info(&unit.kind).cfg(),
self.info(unit.kind).cfg(),
unit.kind,
"RUSTFLAGS",
)
@ -204,7 +204,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
self.config,
&self.build_config.requested_target,
self.host_triple(),
self.info(&unit.kind).cfg(),
self.info(unit.kind).cfg(),
unit.kind,
"RUSTDOCFLAGS",
)
@ -214,8 +214,8 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
pkg.source_id().is_path() || self.config.extra_verbose()
}
fn info(&self, kind: &Kind) -> &TargetInfo {
match *kind {
fn info(&self, kind: Kind) -> &TargetInfo {
match kind {
Kind::Host => &self.host_info,
Kind::Target => &self.target_info,
}
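The `.unwrap_or(self.host_triple())` to `.unwrap_or_else(|| self.host_triple())` change is clippy's `or_fun_call` lint: the argument to `unwrap_or` is evaluated even when the `Option` is `Some`, while `unwrap_or_else` takes a closure that only runs on `None`. A hedged sketch of the difference; `expensive_default` and the triple strings are illustrative, not cargo's API:

fn expensive_default() -> String {
    // Pretend this does real work; with `unwrap_or(expensive_default())`
    // it would run even when `requested` is `Some`.
    "x86_64-unknown-linux-gnu".to_string()
}

fn target(requested: Option<&str>) -> String {
    requested
        .map(str::to_string)
        // The function is only called when `requested` is `None`.
        .unwrap_or_else(expensive_default)
}

fn main() {
    assert_eq!(target(Some("wasm32-unknown-unknown")), "wasm32-unknown-unknown");
    assert_eq!(target(None), "x86_64-unknown-linux-gnu");
}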

View File

@ -52,7 +52,7 @@ impl Invocation {
package_version: id.version().clone(),
kind: unit.kind,
target_kind: unit.target.kind().clone(),
deps: deps,
deps,
outputs: Vec::new(),
links: BTreeMap::new(),
program: String::new(),
@ -69,7 +69,7 @@ impl Invocation {
}
}
pub fn update_cmd(&mut self, cmd: ProcessBuilder) -> CargoResult<()> {
pub fn update_cmd(&mut self, cmd: &ProcessBuilder) -> CargoResult<()> {
self.program = cmd.get_program()
.to_str()
.ok_or_else(|| format_err!("unicode program string required"))?
@ -121,11 +121,11 @@ impl BuildPlan {
pub fn update(
&mut self,
invocation_name: String,
cmd: ProcessBuilder,
outputs: Arc<Vec<OutputFile>>,
invocation_name: &str,
cmd: &ProcessBuilder,
outputs: &Arc<Vec<OutputFile>>,
) -> CargoResult<()> {
let id = self.invocation_map[&invocation_name];
let id = self.invocation_map[invocation_name];
let invocation = self.plan
.invocations
.get_mut(id)
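Two lints drive this hunk: `redundant_field_names` (`deps,` instead of `deps: deps,` when the local and the field share a name) and `needless_pass_by_value` (`update` and `update_cmd` now borrow `&str`/`&ProcessBuilder` because they never need ownership); note that a `HashMap<String, _>` can be indexed with a plain `&str`. A small sketch with stand-in types, not cargo's real `Invocation`/`BuildPlan`:

use std::collections::HashMap;

struct Invocation {
    name: String,
    deps: Vec<usize>,
}

fn make_invocation(name: String, deps: Vec<usize>) -> Invocation {
    // Field init shorthand: `name` / `deps` instead of `name: name` / `deps: deps`.
    Invocation { name, deps }
}

// Borrow the key: the function only reads it, and a HashMap<String, _>
// can be indexed by &str directly.
fn lookup(map: &HashMap<String, usize>, key: &str) -> usize {
    map[key]
}

fn main() {
    let inv = make_invocation("build".to_string(), vec![0, 1]);
    let mut map = HashMap::new();
    map.insert(inv.name.clone(), inv.deps.len());
    assert_eq!(lookup(&map, "build"), 2);
}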

View File

@ -28,7 +28,7 @@ pub fn build_unit_dependencies<'a, 'cfg>(
bcx: &BuildContext<'a, 'cfg>,
deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
) -> CargoResult<()> {
assert!(deps.len() == 0, "can only build unit deps once");
assert!(deps.is_empty(), "can only build unit deps once");
for unit in roots.iter() {
// Dependencies of tests/benches should not have `panic` set.
@ -78,7 +78,7 @@ fn deps_of<'a, 'cfg>(
/// for that package.
/// This returns a vec of `(Unit, ProfileFor)` pairs. The `ProfileFor`
/// is the profile type that should be used for dependencies of the unit.
fn compute_deps<'a, 'b, 'cfg>(
fn compute_deps<'a, 'cfg>(
unit: &Unit<'a>,
bcx: &BuildContext<'a, 'cfg>,
profile_for: ProfileFor,
@ -93,7 +93,7 @@ fn compute_deps<'a, 'b, 'cfg>(
let id = unit.pkg.package_id();
let deps = bcx.resolve.deps(id);
let mut ret = deps.filter(|&(_id, deps)| {
assert!(deps.len() > 0);
assert!(!deps.is_empty());
deps.iter().any(|dep| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
@ -128,7 +128,7 @@ fn compute_deps<'a, 'b, 'cfg>(
})
}).filter_map(|(id, _)| match bcx.get_package(id) {
Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
let mode = check_or_build_mode(&unit.mode, t);
let mode = check_or_build_mode(unit.mode, t);
let unit = new_unit(bcx, pkg, t, profile_for, unit.kind.for_target(t), mode);
Ok((unit, profile_for))
}),
@ -247,7 +247,7 @@ fn compute_deps_doc<'a, 'cfg>(
};
// rustdoc only needs rmeta files for regular dependencies.
// However, for plugins/proc-macros, deps should be built like normal.
let mode = check_or_build_mode(&unit.mode, lib);
let mode = check_or_build_mode(unit.mode, lib);
let lib_unit = new_unit(
bcx,
dep,
@ -287,7 +287,7 @@ fn maybe_lib<'a>(
profile_for: ProfileFor,
) -> Option<(Unit<'a>, ProfileFor)> {
unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
let mode = check_or_build_mode(&unit.mode, t);
let mode = check_or_build_mode(unit.mode, t);
let unit = new_unit(bcx, unit.pkg, t, profile_for, unit.kind.for_target(t), mode);
(unit, profile_for)
})
@ -322,8 +322,8 @@ fn dep_build_script<'a>(unit: &Unit<'a>, bcx: &BuildContext) -> Option<(Unit<'a>
}
/// Choose the correct mode for dependencies.
fn check_or_build_mode(mode: &CompileMode, target: &Target) -> CompileMode {
match *mode {
fn check_or_build_mode(mode: CompileMode, target: &Target) -> CompileMode {
match mode {
CompileMode::Check { .. } | CompileMode::Doc { .. } => {
if target.for_host() {
// Plugin and proc-macro targets should be compiled like
@ -390,7 +390,7 @@ fn connect_run_custom_build_deps<'a>(
for dep in deps {
if dep.mode == CompileMode::RunCustomBuild {
reverse_deps.entry(dep)
.or_insert(HashSet::new())
.or_insert_with(HashSet::new)
.insert(unit);
}
}
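Two suggestions recur in this file: `len_zero` (use `is_empty()` rather than comparing `len()` with zero) and `or_fun_call` on the entry API (`or_insert_with(HashSet::new)` only builds the empty set when the key is actually vacant). A minimal sketch with a toy reverse-dependency map; the string data is made up:

use std::collections::{HashMap, HashSet};

fn main() {
    let deps: Vec<(&str, &str)> = vec![("a", "build-script"), ("b", "build-script")];
    assert!(!deps.is_empty(), "expected some dependency edges");

    let mut reverse: HashMap<&str, HashSet<&str>> = HashMap::new();
    for &(unit, dep) in &deps {
        // Passing the constructor as a function means the empty HashSet
        // is only allocated when the entry is missing.
        reverse.entry(dep).or_insert_with(HashSet::new).insert(unit);
    }
    assert_eq!(reverse["build-script"].len(), 2);
}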

View File

@ -120,7 +120,7 @@ impl<'a> JobQueue<'a> {
let key = Key::new(unit);
let deps = key.dependencies(cx)?;
self.queue
.queue(Fresh, key, Vec::new(), &deps)
.queue(Fresh, &key, Vec::new(), &deps)
.push((job, fresh));
*self.counts.entry(key.pkg).or_insert(0) += 1;
Ok(())
@ -277,7 +277,7 @@ impl<'a> JobQueue<'a> {
.verbose(|c| c.status("Running", &cmd))?;
}
Message::BuildPlanMsg(module_name, cmd, filenames) => {
plan.update(module_name, cmd, filenames)?;
plan.update(&module_name, &cmd, &filenames)?;
}
Message::Stdout(out) => {
if cx.bcx.config.extra_verbose() {
@ -314,7 +314,7 @@ impl<'a> JobQueue<'a> {
if !self.active.is_empty() {
error = Some(format_err!("build failed"));
handle_error(e, &mut *cx.bcx.config.shell());
handle_error(&e, &mut *cx.bcx.config.shell());
cx.bcx.config.shell().warn(
"build failed, waiting for other \
jobs to finish...",
@ -430,7 +430,7 @@ impl<'a> JobQueue<'a> {
if !output.warnings.is_empty() && msg.is_some() {
// Output an empty line.
writeln!(bcx.config.shell().err(), "")?;
writeln!(bcx.config.shell().err())?;
}
}

View File

@ -934,11 +934,11 @@ fn envify(s: &str) -> String {
}
impl Kind {
fn for_target(&self, target: &Target) -> Kind {
fn for_target(self, target: &Target) -> Kind {
// Once we start compiling for the `Host` kind we continue doing so, but
// if we are a `Target` kind and then we start compiling for a target
// that needs to be on the host we lift ourselves up to `Host`
match *self {
match self {
Kind::Host => Kind::Host,
Kind::Target if target.for_host() => Kind::Host,
Kind::Target => Kind::Target,

View File

@ -111,7 +111,7 @@ pub fn output_depinfo<'a, 'b>(cx: &mut Context<'a, 'b>, unit: &Unit<'a>) -> Carg
for dep in &deps {
write!(outfile, " {}", dep)?;
}
writeln!(outfile, "")?;
writeln!(outfile)?;
// dep-info generation failed, so delete output file. This will
// usually cause the build system to always rerun the build

View File

@ -406,11 +406,12 @@ impl<'r> Requirements<'r> {
fn require_value<'f>(&mut self, fv: &'f FeatureValue) -> CargoResult<()> {
match fv {
FeatureValue::Feature(feat) => self.require_feature(*feat),
FeatureValue::Crate(dep) => Ok(self.require_dependency(*dep)),
FeatureValue::Feature(feat) => self.require_feature(*feat)?,
FeatureValue::Crate(dep) => self.require_dependency(*dep),
FeatureValue::CrateFeature(dep, dep_feat) => {
Ok(self.require_crate_feature(*dep, *dep_feat))
self.require_crate_feature(*dep, *dep_feat)
}
}
};
Ok(())
}
}

View File

@ -733,7 +733,7 @@ impl RemainingCandidates {
if a != b.summary.package_id() {
conflicting_prev_active
.entry(a.clone())
.or_insert(ConflictReason::Links(link));
.or_insert_with(|| ConflictReason::Links(link));
continue;
}
}

View File

@ -237,10 +237,10 @@ unable to verify that `{0}` is the same as when the lockfile was generated
impl fmt::Debug for Resolve {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "graph: {:?}\n", self.graph)?;
write!(fmt, "\nfeatures: {{\n")?;
writeln!(fmt, "graph: {:?}", self.graph)?;
writeln!(fmt, "\nfeatures: {{")?;
for (pkg, features) in &self.features {
write!(fmt, " {}: {:?}\n", pkg, features)?;
writeln!(fmt, " {}: {:?}", pkg, features)?;
}
write!(fmt, "}}")
}
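The `Debug for Resolve` rewrite is clippy's `write_with_newline`: a `write!` whose format string ends in `\n` reads better as `writeln!`, and (as in the JobQueue hunk above) an empty `writeln!(w, "")` is just `writeln!(w)`. A self-contained sketch of the same formatting pattern; the feature data is invented:

use std::fmt::{self, Write};

fn render(features: &[(&str, &str)]) -> Result<String, fmt::Error> {
    let mut out = String::new();
    // Format string ending in `\n` becomes `writeln!`.
    writeln!(out, "features: {{")?;
    for (pkg, feats) in features {
        writeln!(out, "  {}: {}", pkg, feats)?;
    }
    write!(out, "}}")?;
    // An empty trailing line: `writeln!(out)` instead of `writeln!(out, "")`.
    writeln!(out)?;
    Ok(out)
}

fn main() {
    let text = render(&[("serde", "derive")]).unwrap();
    assert_eq!(text, "features: {\n  serde: derive\n}\n");
}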

View File

@ -260,7 +260,7 @@ impl ShellOut {
}
stream.reset()?;
match message {
Some(message) => write!(stream, " {}\n", message)?,
Some(message) => writeln!(stream, " {}", message)?,
None => write!(stream, " ")?,
}
}
@ -271,7 +271,7 @@ impl ShellOut {
write!(w, "{}", status)?;
}
match message {
Some(message) => write!(w, " {}\n", message)?,
Some(message) => writeln!(w, " {}", message)?,
None => write!(w, " ")?,
}
}
@ -290,8 +290,8 @@ impl ShellOut {
impl ColorChoice {
/// Convert our color choice to termcolor's version
fn to_termcolor_color_choice(&self) -> termcolor::ColorChoice {
match *self {
fn to_termcolor_color_choice(self) -> termcolor::ColorChoice {
match self {
ColorChoice::Always => termcolor::ColorChoice::Always,
ColorChoice::Never => termcolor::ColorChoice::Never,
ColorChoice::CargoAuto => {

View File

@ -148,7 +148,7 @@ where K: Borrow<str> + Ord + Display {
for dep in dependencies.iter() {
dep_map
.entry(dep.name().as_str())
.or_insert(Vec::new())
.or_insert_with(Vec::new)
.push(dep);
}

View File

@ -3,6 +3,14 @@
// But if someone runs it they should know that
// @alexcrichton disagree with clippy on some style things
#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))]
#![cfg_attr(feature = "cargo-clippy", allow(explicit_into_iter_loop))]
// also, we use closures as an alternative to try/catch blocks
#![cfg_attr(feature = "cargo-clippy", allow(redundant_closure_call))]
// we have lots of arguments; cleaning this up would be a large project
#![cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))]
// we have some complicated functions; cleaning this up would be a large project
#![cfg_attr(feature = "cargo-clippy", allow(cyclomatic_complexity))]
extern crate atty;
extern crate clap;
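These crate-level `cfg_attr` attributes only take effect when the `cargo-clippy` feature is set, i.e. when the crate is compiled under `cargo clippy`; otherwise they are inert. A minimal sketch of the same idea in a standalone crate (the eight-argument function is just an example that would trip the default `too_many_arguments` threshold):

// Inner attributes must sit at the top of lib.rs/main.rs, before any items.
#![cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))]

// Clippy would flag this signature; the attribute above silences the lint
// only for clippy builds, and an ordinary `cargo build` is unaffected.
fn configure(a: u8, b: u8, c: u8, d: u8, e: u8, f: u8, g: u8, h: u8) -> u32 {
    [a, b, c, d, e, f, g, h].iter().map(|&x| u32::from(x)).sum()
}

fn main() {
    assert_eq!(configure(1, 1, 1, 1, 1, 1, 1, 1), 8);
}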
@ -156,15 +164,15 @@ pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {
std::process::exit(exit_code)
}
pub fn handle_error(err: CargoError, shell: &mut Shell) {
debug!("handle_error; err={:?}", &err);
pub fn handle_error(err: &CargoError, shell: &mut Shell) {
debug!("handle_error; err={:?}", err);
let _ignored_result = shell.error(&err);
handle_cause(&err, shell);
let _ignored_result = shell.error(err);
handle_cause(err, shell);
}
fn handle_cause(cargo_err: &Error, shell: &mut Shell) -> bool {
fn print(error: String, shell: &mut Shell) {
fn print(error: &str, shell: &mut Shell) {
drop(writeln!(shell.err(), "\nCaused by:"));
drop(writeln!(shell.err(), " {}", error));
}
@ -175,7 +183,7 @@ fn handle_cause(cargo_err: &Error, shell: &mut Shell) -> bool {
// The first error has already been printed to the shell
// Print all remaining errors
for err in cargo_err.causes().skip(1) {
print(err.to_string(), shell);
print(&err.to_string(), shell);
}
} else {
// The first error has already been printed to the shell
@ -185,7 +193,7 @@ fn handle_cause(cargo_err: &Error, shell: &mut Shell) -> bool {
return false;
}
print(err.to_string(), shell);
print(&err.to_string(), shell);
}
}

View File

@ -300,7 +300,7 @@ pub fn compile_ws<'a>(
cx.compile(&units, export_dir.clone(), &exec)?
};
return Ok(ret);
Ok(ret)
}
impl FilterRule {

View File

@ -99,7 +99,7 @@ pub fn install(
) {
Ok(()) => succeeded.push(krate),
Err(e) => {
::handle_error(e, &mut opts.config.shell());
::handle_error(&e, &mut opts.config.shell());
failed.push(krate)
}
}
@ -726,7 +726,7 @@ pub fn uninstall(
match uninstall_one(&root, spec, bins, config) {
Ok(()) => succeeded.push(spec),
Err(e) => {
::handle_error(e, &mut config.shell());
::handle_error(&e, &mut config.shell());
failed.push(spec)
}
}

View File

@ -39,8 +39,8 @@ pub enum NewProjectKind {
}
impl NewProjectKind {
fn is_bin(&self) -> bool {
*self == NewProjectKind::Bin
fn is_bin(self) -> bool {
self == NewProjectKind::Bin
}
}
@ -190,11 +190,11 @@ fn detect_source_paths_and_types(
let tests = vec![
Test {
proposed_path: format!("src/main.rs"),
proposed_path: "src/main.rs".to_string(),
handling: H::Bin,
},
Test {
proposed_path: format!("main.rs"),
proposed_path: "main.rs".to_string(),
handling: H::Bin,
},
Test {
@ -206,11 +206,11 @@ fn detect_source_paths_and_types(
handling: H::Detect,
},
Test {
proposed_path: format!("src/lib.rs"),
proposed_path: "src/lib.rs".to_string(),
handling: H::Lib,
},
Test {
proposed_path: format!("lib.rs"),
proposed_path: "lib.rs".to_string(),
handling: H::Lib,
},
];
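The `format!("src/main.rs")` to `"src/main.rs".to_string()` edits are clippy's `useless_format`: `format!` with no `{}` placeholders is just a slower way to build a `String`. A tiny sketch; the paths are only examples:

fn main() {
    // No interpolation, so `.to_string()` says the same thing more directly.
    let fixed = "src/main.rs".to_string();
    // `format!` is still the right tool once something is substituted in.
    let dynamic = format!("src/{}.rs", "lib");
    assert_eq!(fixed, "src/main.rs");
    assert_eq!(dynamic, "src/lib.rs");
}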

View File

@ -39,15 +39,12 @@ pub fn run(
if bins.len() == 1 {
let &(name, kind) = bins.first().unwrap();
match kind {
&TargetKind::ExampleLib(..) => {
bail!(
if let TargetKind::ExampleLib(..) = kind {
bail!(
"example target `{}` is a library and cannot be executed",
name
)
},
_ => { }
};
)
}
}
if bins.len() > 1 {
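Replacing the `match` with `if let` here is clippy's `single_match`: when only one pattern matters and the other arm is `_ => {}`, `if let` states the intent more directly. A sketch with a stand-in `TargetKind`, not cargo's:

enum TargetKind {
    ExampleLib(Vec<String>),
    Bin,
}

fn check(kind: &TargetKind, name: &str) -> Result<(), String> {
    // One interesting arm plus an empty catch-all reads better as `if let`.
    if let TargetKind::ExampleLib(..) = kind {
        return Err(format!(
            "example target `{}` is a library and cannot be executed",
            name
        ));
    }
    Ok(())
}

fn main() {
    assert!(check(&TargetKind::Bin, "demo").is_ok());
    assert!(check(&TargetKind::ExampleLib(Vec::new()), "demo").is_err());
}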

View File

@ -88,7 +88,7 @@ fn check_version_control(opts: &FixOptions) -> CargoResult<()> {
}
}
if dirty_files.len() == 0 {
if dirty_files.is_empty() {
return Ok(())
}
@ -119,8 +119,7 @@ pub fn fix_maybe_exec_rustc() -> CargoResult<bool> {
let filename = env::args()
.skip(1)
.filter(|s| s.ends_with(".rs"))
.filter(|s| Path::new(s).exists())
.next();
.find(|s| Path::new(s).exists());
trace!("cargo-fix as rustc got file {:?}", filename);
let rustc = env::var_os("RUSTC").expect("failed to find RUSTC env var");
@ -152,7 +151,7 @@ pub fn fix_maybe_exec_rustc() -> CargoResult<bool> {
cmd.args(env::args().skip(1));
cmd.arg("--cap-lints=warn");
cmd.arg("--error-format=json");
if fixes.original_files.len() > 0 {
if !fixes.original_files.is_empty() {
let output = cmd.output().context("failed to spawn rustc")?;
if output.status.success() {
@ -164,7 +163,7 @@ pub fn fix_maybe_exec_rustc() -> CargoResult<bool> {
// If we succeeded then we'll want to commit to the changes we made, if
// any. If stderr is empty then there's no need for the final exec at
// the end, we just bail out here.
if output.status.success() && output.stderr.len() == 0 {
if output.status.success() && output.stderr.is_empty() {
return Ok(true);
}
@ -304,7 +303,7 @@ fn rustfix_crate(lock_addr: &str, rustc: &Path, filename: &str)
match rustfix::apply_suggestions(&code, &suggestions) {
Err(e) => {
Message::ReplaceFailed {
file: file,
file,
message: e.to_string(),
}.post()?;
// TODO: Add flag to decide if we want to continue or bail out
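`.filter(..).next()` collapsing into `.find(..)` is clippy's `filter_next`; the other edits in this hunk are the familiar `is_empty()` and struct-field-shorthand cleanups. A small sketch of `find` doing the work of `filter` + `next`; the file names are invented:

fn main() {
    let args = vec!["--edition", "src/lib.rs", "build.rs", "missing.rs"];
    // `.filter(p).next()` and `.find(p)` are equivalent; `find` is clearer.
    let first_existing = args
        .iter()
        .filter(|s| s.ends_with(".rs"))
        .find(|s| !s.contains("missing"));
    assert_eq!(first_existing, Some(&"src/lib.rs"));
}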

View File

@ -160,12 +160,12 @@ pub fn resolve_with_previous<'a, 'cfg>(
);
}
let ref keep = |p: &&'a PackageId| {
let keep = &(|p: &&'a PackageId| {
!to_avoid_sources.contains(&p.source_id()) && match to_avoid {
Some(set) => !set.contains(p),
None => true,
}
};
});
// In the case where a previous instance of resolve is available, we
// want to lock as many packages as possible to the previous version
@ -535,7 +535,7 @@ fn register_previous_locks<'a>(
// function let's put it to action. Take a look at the previous lockfile,
// filter everything by this callback, and then shove everything else into
// the registry as a locked dependency.
let ref keep = |id: &&'a PackageId| keep(id) && !avoid_locking.contains(id);
let keep = &(|id: &&'a PackageId| keep(id) && !avoid_locking.contains(id));
for node in resolve.iter().filter(keep) {
let deps = resolve
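`let ref keep = |...|` becoming `let keep = &(|...|)` is clippy's `toplevel_ref_arg`: taking a reference on the right-hand side says the same thing without the `ref` binding. A minimal sketch of a borrowed closure used as a filter; the numbers are arbitrary:

fn main() {
    let to_avoid = vec![3, 5];
    // Bind a reference to the closure explicitly instead of `let ref keep = ...`.
    let keep = &(|x: &i32| !to_avoid.contains(x));
    let kept: Vec<i32> = (1..=6).filter(|x| keep(x)).collect();
    assert_eq!(kept, vec![1, 2, 4, 6]);
}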

View File

@ -188,7 +188,7 @@ impl<'cfg> Source for GitSource<'cfg> {
// Dont use the full hash,
// to contribute less to reaching the path length limit on Windows:
// https://github.com/servo/servo/pull/14397
let short_id = db.to_short_id(actual_rev.clone()).unwrap();
let short_id = db.to_short_id(&actual_rev).unwrap();
let checkout_path = lock.parent()
.join("checkouts")

View File

@ -189,7 +189,7 @@ impl GitDatabase {
Ok(checkout)
}
pub fn to_short_id(&self, revision: GitRevision) -> CargoResult<GitShortID> {
pub fn to_short_id(&self, revision: &GitRevision) -> CargoResult<GitShortID> {
let obj = self.repo.find_object(revision.0, None)?;
Ok(GitShortID(obj.short_id()?))
}

View File

@ -139,7 +139,7 @@ impl<'cfg> PathSource<'cfg> {
.collect::<Result<Vec<_>, _>>()?;
let glob_should_package = |relative_path: &Path| -> bool {
fn glob_match(patterns: &Vec<Pattern>, relative_path: &Path) -> bool {
fn glob_match(patterns: &[Pattern], relative_path: &Path) -> bool {
patterns
.iter()
.any(|pattern| pattern.matches_path(relative_path))
@ -277,7 +277,7 @@ impl<'cfg> PathSource<'cfg> {
};
let path = util::without_prefix(root, cur).unwrap().join("Cargo.toml");
if index.get_path(&path, 0).is_some() {
return Some(self.list_files_git(pkg, repo, filter));
return Some(self.list_files_git(pkg, &repo, filter));
}
}
}
@ -296,7 +296,7 @@ impl<'cfg> PathSource<'cfg> {
fn list_files_git(
&self,
pkg: &Package,
repo: git2::Repository,
repo: &git2::Repository,
filter: &mut FnMut(&Path) -> CargoResult<bool>,
) -> CargoResult<Vec<PathBuf>> {
warn!("list_files_git {}", pkg.package_id());
@ -380,7 +380,7 @@ impl<'cfg> PathSource<'cfg> {
let rel = rel.replace(r"\", "/");
match repo.find_submodule(&rel).and_then(|s| s.open()) {
Ok(repo) => {
let files = self.list_files_git(pkg, repo, filter)?;
let files = self.list_files_git(pkg, &repo, filter)?;
ret.extend(files.into_iter());
}
Err(..) => {
@ -480,7 +480,7 @@ impl<'cfg> PathSource<'cfg> {
// condition where this path was rm'ed - either way,
// we can ignore the error and treat the path's mtime
// as 0.
let mtime = paths::mtime(&file).unwrap_or(FileTime::zero());
let mtime = paths::mtime(&file).unwrap_or_else(|_| FileTime::zero());
if mtime > max {
max = mtime;
max_path = file;

View File

@ -126,11 +126,11 @@ impl<'a> Parser<'a> {
Some(&Ok(Token::Ident(op @ "all"))) | Some(&Ok(Token::Ident(op @ "any"))) => {
self.t.next();
let mut e = Vec::new();
self.eat(Token::LeftParen)?;
while !self.try(Token::RightParen) {
self.eat(&Token::LeftParen)?;
while !self.try(&Token::RightParen) {
e.push(self.expr()?);
if !self.try(Token::Comma) {
self.eat(Token::RightParen)?;
if !self.try(&Token::Comma) {
self.eat(&Token::RightParen)?;
break;
}
}
@ -142,9 +142,9 @@ impl<'a> Parser<'a> {
}
Some(&Ok(Token::Ident("not"))) => {
self.t.next();
self.eat(Token::LeftParen)?;
self.eat(&Token::LeftParen)?;
let e = self.expr()?;
self.eat(Token::RightParen)?;
self.eat(&Token::RightParen)?;
Ok(CfgExpr::Not(Box::new(e)))
}
Some(&Ok(..)) => self.cfg().map(CfgExpr::Value),
@ -159,7 +159,7 @@ impl<'a> Parser<'a> {
fn cfg(&mut self) -> CargoResult<Cfg> {
match self.t.next() {
Some(Ok(Token::Ident(name))) => {
let e = if self.try(Token::Equals) {
let e = if self.try(&Token::Equals) {
let val = match self.t.next() {
Some(Ok(Token::String(s))) => s,
Some(Ok(t)) => bail!("expected a string, found {}", t.classify()),
@ -178,18 +178,18 @@ impl<'a> Parser<'a> {
}
}
fn try(&mut self, token: Token<'a>) -> bool {
fn try(&mut self, token: &Token<'a>) -> bool {
match self.t.peek() {
Some(&Ok(ref t)) if token == *t => {}
Some(&Ok(ref t)) if token == t => {}
_ => return false,
}
self.t.next();
true
}
fn eat(&mut self, token: Token<'a>) -> CargoResult<()> {
fn eat(&mut self, token: &Token<'a>) -> CargoResult<()> {
match self.t.next() {
Some(Ok(ref t)) if token == *t => Ok(()),
Some(Ok(ref t)) if token == t => Ok(()),
Some(Ok(t)) => bail!("expected {}, found {}", token.classify(), t.classify()),
Some(Err(e)) => Err(e),
None => bail!("expected {}, but cfg expr ended", token.classify()),

View File

@ -103,7 +103,7 @@ impl Config {
})
.collect();
let cache_rustc_info = match env.get("CARGO_CACHE_RUSTC_INFO".into()) {
let cache_rustc_info = match env.get("CARGO_CACHE_RUSTC_INFO") {
Some(cache) => cache != "0",
_ => true,
};
@ -235,7 +235,7 @@ impl Config {
let argv0 = env::args_os()
.map(PathBuf::from)
.next()
.ok_or(format_err!("no argv[0]"))?;
.ok_or_else(||format_err!("no argv[0]"))?;
paths::resolve_executable(&argv0)
}
@ -467,7 +467,7 @@ impl Config {
val: i,
definition: Definition::Path(path),
})),
Some(val) => self.expected("list", key, val),
Some(val) => self.expected("list", key, &val),
None => Ok(None),
}
}
@ -489,7 +489,7 @@ impl Config {
val: i.split(' ').map(str::to_string).collect(),
definition: Definition::Path(path),
})),
Some(val) => self.expected("list or string", key, val),
Some(val) => self.expected("list or string", key, &val),
None => Ok(None),
}
}
@ -500,7 +500,7 @@ impl Config {
val: i,
definition: Definition::Path(path),
})),
Some(val) => self.expected("table", key, val),
Some(val) => self.expected("table", key, &val),
None => Ok(None),
}
}
@ -521,13 +521,13 @@ impl Config {
val: i,
definition: Definition::Path(path),
})),
Some(cv) => return Err(ConfigError::expected(&config_key, "an integer", &cv)),
None => return Ok(None),
Some(cv) => Err(ConfigError::expected(&config_key, "an integer", &cv)),
None => Ok(None),
},
}
}
fn expected<T>(&self, ty: &str, key: &str, val: CV) -> CargoResult<T> {
fn expected<T>(&self, ty: &str, key: &str, val: &CV) -> CargoResult<T> {
val.expected(ty, key)
.map_err(|e| format_err!("invalid configuration for key `{}`\n{}", key, e))
}
@ -659,7 +659,7 @@ impl Config {
fn load_credentials(&self, cfg: &mut ConfigValue) -> CargoResult<()> {
let home_path = self.home_path.clone().into_path_unlocked();
let credentials = home_path.join("credentials");
if !fs::metadata(&credentials).is_ok() {
if fs::metadata(&credentials).is_err() {
return Ok(());
}
@ -719,7 +719,7 @@ impl Config {
.collect::<String>();
if let Some(tool_path) = env::var_os(&var) {
let maybe_relative = match tool_path.to_str() {
Some(s) => s.contains("/") || s.contains("\\"),
Some(s) => s.contains('/') || s.contains('\\'),
None => false,
};
let path = if maybe_relative {
@ -891,14 +891,14 @@ impl ConfigError {
}
}
fn missing(key: String) -> ConfigError {
fn missing(key: &str) -> ConfigError {
ConfigError {
error: format_err!("missing config key `{}`", key),
definition: None,
}
}
fn with_key_context(self, key: String, definition: Definition) -> ConfigError {
fn with_key_context(self, key: &str, definition: Definition) -> ConfigError {
ConfigError {
error: format_err!("could not load config key `{}`: {}", key, self),
definition: Some(definition),
@ -965,10 +965,10 @@ macro_rules! deserialize_method {
V: de::Visitor<'de>,
{
let v = self.config.$getter(&self.key)?.ok_or_else(||
ConfigError::missing(self.key.to_config()))?;
ConfigError::missing(&self.key.to_config()))?;
let Value{val, definition} = v;
let res: Result<V::Value, ConfigError> = visitor.$visit(val);
res.map_err(|e| e.with_key_context(self.key.to_config(), definition))
res.map_err(|e| e.with_key_context(&self.key.to_config(), definition))
}
}
}
@ -989,16 +989,16 @@ impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> {
} else if let Ok(v) = v.parse::<i64>() {
visitor.visit_i64(v)
} else if self.config.cli_unstable().advanced_env
&& v.starts_with("[")
&& v.ends_with("]")
&& v.starts_with('[')
&& v.ends_with(']')
{
visitor.visit_seq(ConfigSeqAccess::new(self.config, self.key.clone())?)
visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?)
} else {
visitor.visit_string(v.clone())
};
return res.map_err(|e| {
e.with_key_context(
self.key.to_config(),
&self.key.to_config(),
Definition::Environment(self.key.to_env()),
)
});
@ -1010,7 +1010,7 @@ impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> {
CV::Integer(i, path) => (visitor.visit_i64(i), path),
CV::String(s, path) => (visitor.visit_string(s), path),
CV::List(_, path) => (
visitor.visit_seq(ConfigSeqAccess::new(self.config, self.key.clone())?),
visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?),
path,
),
CV::Table(_, path) => (
@ -1021,9 +1021,9 @@ impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> {
};
let (res, path) = res;
return res
.map_err(|e| e.with_key_context(self.key.to_config(), Definition::Path(path)));
.map_err(|e| e.with_key_context(&self.key.to_config(), Definition::Path(path)));
}
Err(ConfigError::missing(self.key.to_config()))
Err(ConfigError::missing(&self.key.to_config()))
}
deserialize_method!(deserialize_bool, visit_bool, get_bool_priv);
@ -1072,14 +1072,14 @@ impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> {
where
V: de::Visitor<'de>,
{
visitor.visit_seq(ConfigSeqAccess::new(self.config, self.key)?)
visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?)
}
fn deserialize_tuple<V>(self, _len: usize, visitor: V) -> Result<V::Value, Self::Error>
where
V: de::Visitor<'de>,
{
visitor.visit_seq(ConfigSeqAccess::new(self.config, self.key)?)
visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?)
}
fn deserialize_tuple_struct<V>(
@ -1091,7 +1091,7 @@ impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> {
where
V: de::Visitor<'de>,
{
visitor.visit_seq(ConfigSeqAccess::new(self.config, self.key)?)
visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?)
}
fn deserialize_newtype_struct<V>(
@ -1113,7 +1113,7 @@ impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> {
.to_string();
visitor.visit_newtype_struct(path.into_deserializer())
}
None => Err(ConfigError::missing(self.key.to_config())),
None => Err(ConfigError::missing(&self.key.to_config())),
}
} else {
visitor.visit_newtype_struct(self)
@ -1155,7 +1155,7 @@ impl<'config> ConfigMapAccess<'config> {
// CARGO_PROFILE_DEV_OVERRIDES_bar_OPT_LEVEL = 3
let rest = &env_key[env_pattern.len()..];
// rest = bar_OPT_LEVEL
let part = rest.splitn(2, "_").next().unwrap();
let part = rest.splitn(2, '_').next().unwrap();
// part = "bar"
set.insert(ConfigKeyPart::CasePart(part.to_string()));
}
@ -1234,7 +1234,7 @@ struct ConfigSeqAccess {
}
impl ConfigSeqAccess {
fn new(config: &Config, key: ConfigKey) -> Result<ConfigSeqAccess, ConfigError> {
fn new(config: &Config, key: &ConfigKey) -> Result<ConfigSeqAccess, ConfigError> {
let mut res = Vec::new();
if let Some(v) = config.get_list(&key.to_config())? {
for (s, path) in v.val {
@ -1247,7 +1247,7 @@ impl ConfigSeqAccess {
let env_key = key.to_env();
let def = Definition::Environment(env_key.clone());
if let Some(v) = config.env.get(&env_key) {
if !(v.starts_with("[") && v.ends_with("]")) {
if !(v.starts_with('[') && v.ends_with(']')) {
return Err(ConfigError::new(
format!("should have TOML list syntax, found `{}`", v),
def.clone(),
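Several edits in this file are clippy's `single_char_pattern`: when the pattern is one character, passing a `char` to `contains`, `splitn`, `starts_with`, and friends is clearer (and a little faster) than a one-byte `&str`. A short sketch; the strings are made up:

fn main() {
    let tool_path = r"tools\bin/rustc";
    let maybe_relative = tool_path.contains('/') || tool_path.contains('\\');
    assert!(maybe_relative);

    // The same applies to `splitn`, `starts_with`, `ends_with`, ...
    let rest = "bar_OPT_LEVEL";
    let part = rest.splitn(2, '_').next().unwrap();
    assert_eq!(part, "bar");
}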

View File

@ -50,8 +50,8 @@ pub enum Freshness {
}
impl Freshness {
pub fn combine(&self, other: Freshness) -> Freshness {
match *self {
pub fn combine(self, other: Freshness) -> Freshness {
match self {
Fresh => other,
Dirty => Dirty,
}
@ -80,7 +80,7 @@ impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
///
/// It is assumed that any dependencies of this package will eventually also
/// be added to the dependency queue.
pub fn queue(&mut self, fresh: Freshness, key: K, value: V, dependencies: &[K]) -> &mut V {
pub fn queue(&mut self, fresh: Freshness, key: &K, value: V, dependencies: &[K]) -> &mut V {
let slot = match self.dep_map.entry(key.clone()) {
Occupied(v) => return &mut v.into_mut().1,
Vacant(v) => v,
@ -207,11 +207,11 @@ mod test {
fn deep_first() {
let mut q = DependencyQueue::new();
q.queue(Freshness::Fresh, 1, (), &[]);
q.queue(Freshness::Fresh, 2, (), &[1]);
q.queue(Freshness::Fresh, 3, (), &[]);
q.queue(Freshness::Fresh, 4, (), &[2, 3]);
q.queue(Freshness::Fresh, 5, (), &[4, 3]);
q.queue(Freshness::Fresh, &1, (), &[]);
q.queue(Freshness::Fresh, &2, (), &[1]);
q.queue(Freshness::Fresh, &3, (), &[]);
q.queue(Freshness::Fresh, &4, (), &[2, 3]);
q.queue(Freshness::Fresh, &5, (), &[4, 3]);
q.queue_finished();
assert_eq!(q.dequeue(), Some((Freshness::Fresh, 1, ())));

View File

@ -101,16 +101,16 @@ impl Message {
"failed to automatically apply fixes suggested by rustc"
)?;
}
if files.len() > 0 {
write!(
if !files.is_empty() {
writeln!(
config.shell().err(),
"\nafter fixes were automatically applied the compiler \
reported errors within these files:\n\n"
reported errors within these files:\n"
)?;
for file in files {
write!(config.shell().err(), " * {}\n", file)?;
writeln!(config.shell().err(), " * {}", file)?;
}
write!(config.shell().err(), "\n")?;
writeln!(config.shell().err())?;
}
write!(config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?;
Ok(())

View File

@ -84,8 +84,7 @@ impl<N: Eq + Hash + Clone, E: Default> Graph<N, E> {
.filter(|&(_node, adjacent)| adjacent.contains_key(pkg))
// Note that we can have "cycles" introduced through dev-dependency
// edges, so make sure we don't loop infinitely.
.filter(|&(node, _)| !res.contains(&node))
.next()
.find(|&(node, _)| !res.contains(&node))
.map(|p| p.0)
};
while let Some(p) = first_pkg_depending_on(pkg, &result) {

View File

@ -48,6 +48,7 @@ mod imp {
use std::io;
use std::mem;
use std::os::windows::prelude::*;
use std::ptr;
use self::winapi::shared::basetsd::*;
use self::winapi::shared::minwindef::*;
@ -84,7 +85,7 @@ mod imp {
// use job objects, so we instead just ignore errors and assume that
// we're otherwise part of someone else's job object in this case.
let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
let job = CreateJobObjectW(ptr::null_mut(), ptr::null());
if job.is_null() {
return None;
}
@ -167,7 +168,7 @@ mod imp {
JobObjectBasicProcessIdList,
&mut jobs as *mut _ as LPVOID,
mem::size_of_val(&jobs) as DWORD,
0 as *mut _,
ptr::null_mut(),
);
if r == 0 {
info!("failed to query job object: {}", last_err());
@ -176,7 +177,7 @@ mod imp {
let mut killed = false;
let list = &jobs.list[..jobs.header.NumberOfProcessIdsInList as usize];
assert!(list.len() > 0);
assert!(!list.is_empty());
info!("found {} remaining processes", list.len() - 1);
let list = list.iter()
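Swapping `0 as *mut _` / `0 as *const _` for `ptr::null_mut()` / `ptr::null()` is clippy's `zero_ptr` lint (hence the new `use std::ptr;`), and the `assert!` change is `len_zero` again. A tiny sketch without the winapi calls:

use std::ptr;

fn main() {
    // Null raw pointers spelled with the std helpers rather than a cast from 0.
    let raw: *mut u8 = ptr::null_mut();
    let raw_const: *const u8 = ptr::null();
    assert!(raw.is_null() && raw_const.is_null());
}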

View File

@ -105,7 +105,7 @@ impl LockServer {
loop {
let mut client = {
let mut state = lock2.lock().unwrap();
if state.1.len() == 0 {
if state.1.is_empty() {
state.0 = false;
break;
} else {

View File

@ -62,7 +62,7 @@ where
remaining -= 1;
}
//todo impl from
Err(e) => return Err(e.into()),
Err(e) => return Err(e),
}
}
}

View File

@ -87,9 +87,9 @@ pub fn without_prefix<'a>(long_path: &'a Path, prefix: &'a Path) -> Option<&'a P
pub fn resolve_executable(exec: &Path) -> CargoResult<PathBuf> {
if exec.components().count() == 1 {
let paths = env::var_os("PATH").ok_or(format_err!("no PATH"))?;
let paths = env::var_os("PATH").ok_or_else(|| format_err!("no PATH"))?;
let candidates = env::split_paths(&paths).flat_map(|path| {
let candidate = PathBuf::from(path).join(&exec);
let candidate = path.join(&exec);
let with_exe = if env::consts::EXE_EXTENSION == "" {
None
} else {

View File

@ -8,7 +8,7 @@ use std::process::{Command, Output, Stdio};
use jobserver::Client;
use shell_escape::escape;
use util::{process_error, CargoError, CargoResult, CargoResultExt, read2};
use util::{process_error, CargoResult, CargoResultExt, read2};
/// A builder object for an external process, similar to `std::process::Command`.
#[derive(Clone, Debug)]
@ -280,7 +280,7 @@ impl ProcessBuilder {
Some(&output.status),
to_print,
);
return Err(CargoError::from(e).context(cx).into());
return Err(e.context(cx).into());
}
}

View File

@ -47,7 +47,7 @@ impl Drop for Profiler {
});
let duration = start.elapsed();
let duration_ms =
duration.as_secs() * 1000 + u64::from(duration.subsec_nanos() / 1_000_000);
duration.as_secs() * 1000 + u64::from(duration.subsec_millis());
let msg = (
stack_len,

View File

@ -21,3 +21,9 @@ impl Sha256 {
ret
}
}
impl Default for Sha256 {
fn default() -> Self {
Self::new()
}
}
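Adding `impl Default for Sha256` answers clippy's `new_without_default`: a type with a no-argument `new()` should usually implement `Default` too, by delegating to `new()`. A sketch with a stand-in struct (the `state` field is invented, not cargo's hasher internals):

struct Sha256 {
    state: [u32; 8],
}

impl Sha256 {
    fn new() -> Sha256 {
        Sha256 { state: [0; 8] }
    }
}

// `Default` simply delegates to `new()`, so `Sha256::default()` and
// `Sha256::new()` stay in sync.
impl Default for Sha256 {
    fn default() -> Self {
        Self::new()
    }
}

fn main() {
    let hasher = Sha256::default();
    assert_eq!(hasher.state, [0; 8]);
}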

View File

@ -417,7 +417,7 @@ impl<'de> de::Deserialize<'de> for ProfilePackageSpec {
} else {
PackageIdSpec::parse(&string)
.map_err(de::Error::custom)
.map(|s| ProfilePackageSpec::Spec(s))
.map(ProfilePackageSpec::Spec)
}
}
}
@ -869,7 +869,7 @@ impl TomlManifest {
{
let mut names_sources = BTreeMap::new();
for dep in &deps {
let name = dep.rename().unwrap_or(dep.name().as_str());
let name = dep.rename().unwrap_or_else(|| dep.name().as_str());
let prev = names_sources.insert(name.to_string(), dep.source_id());
if prev.is_some() && prev != Some(dep.source_id()) {
bail!(
@ -1131,13 +1131,13 @@ impl TomlManifest {
let build_rs = package_root.join("build.rs");
match *build {
Some(StringOrBool::Bool(false)) => None, // explicitly no build script
Some(StringOrBool::Bool(true)) => Some(build_rs.into()),
Some(StringOrBool::Bool(true)) => Some(build_rs),
Some(StringOrBool::String(ref s)) => Some(PathBuf::from(s)),
None => {
match fs::metadata(&build_rs) {
// If there is a build.rs file next to the Cargo.toml, assume it is
// a build script
Ok(ref e) if e.is_file() => Some(build_rs.into()),
Ok(ref e) if e.is_file() => Some(build_rs),
Ok(_) | Err(_) => None,
}
}

View File

@ -133,20 +133,20 @@ impl Registry {
pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<String> {
let body = serde_json::to_string(&OwnersReq { users: owners })?;
let body = self.put(format!("/crates/{}/owners", krate), body.as_bytes())?;
let body = self.put(&format!("/crates/{}/owners", krate), body.as_bytes())?;
assert!(serde_json::from_str::<OwnerResponse>(&body)?.ok);
Ok(serde_json::from_str::<OwnerResponse>(&body)?.msg)
}
pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
let body = serde_json::to_string(&OwnersReq { users: owners })?;
let body = self.delete(format!("/crates/{}/owners", krate), Some(body.as_bytes()))?;
let body = self.delete(&format!("/crates/{}/owners", krate), Some(body.as_bytes()))?;
assert!(serde_json::from_str::<OwnerResponse>(&body)?.ok);
Ok(())
}
pub fn list_owners(&mut self, krate: &str) -> Result<Vec<User>> {
let body = self.get(format!("/crates/{}/owners", krate))?;
let body = self.get(&format!("/crates/{}/owners", krate))?;
Ok(serde_json::from_str::<Users>(&body)?.users)
}
@ -228,7 +228,7 @@ impl Registry {
pub fn search(&mut self, query: &str, limit: u32) -> Result<(Vec<Crate>, u32)> {
let formatted_query = percent_encode(query.as_bytes(), QUERY_ENCODE_SET);
let body = self.req(
format!("/crates?q={}&per_page={}", formatted_query, limit),
&format!("/crates?q={}&per_page={}", formatted_query, limit),
None,
Auth::Unauthorized,
)?;
@ -238,33 +238,33 @@ impl Registry {
}
pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> {
let body = self.delete(format!("/crates/{}/{}/yank", krate, version), None)?;
let body = self.delete(&format!("/crates/{}/{}/yank", krate, version), None)?;
assert!(serde_json::from_str::<R>(&body)?.ok);
Ok(())
}
pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> {
let body = self.put(format!("/crates/{}/{}/unyank", krate, version), &[])?;
let body = self.put(&format!("/crates/{}/{}/unyank", krate, version), &[])?;
assert!(serde_json::from_str::<R>(&body)?.ok);
Ok(())
}
fn put(&mut self, path: String, b: &[u8]) -> Result<String> {
fn put(&mut self, path: &str, b: &[u8]) -> Result<String> {
self.handle.put(true)?;
self.req(path, Some(b), Auth::Authorized)
}
fn get(&mut self, path: String) -> Result<String> {
fn get(&mut self, path: &str) -> Result<String> {
self.handle.get(true)?;
self.req(path, None, Auth::Authorized)
}
fn delete(&mut self, path: String, b: Option<&[u8]>) -> Result<String> {
fn delete(&mut self, path: &str, b: Option<&[u8]>) -> Result<String> {
self.handle.custom_request("DELETE")?;
self.req(path, b, Auth::Authorized)
}
fn req(&mut self, path: String, body: Option<&[u8]>, authorized: Auth) -> Result<String> {
fn req(&mut self, path: &str, body: Option<&[u8]>, authorized: Auth) -> Result<String> {
self.handle.url(&format!("{}/api/v1{}", self.host, path))?;
let mut headers = List::new();
headers.append("Accept: application/json")?;