intellij rust suggested fixes

(cherry picked from commit 24836e9)
Eh2406 2018-02-23 18:27:53 -05:00
parent 62f05f41d3
commit 0247dc429a
61 changed files with 202 additions and 202 deletions
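The hunks below are the mechanical cleanups suggested by the IntelliJ Rust plugin's inspections: struct-literal field init shorthand (writing "config," instead of "config: config,"), assert_eq!/assert_ne! in place of assert!(a == b)/assert!(a != b), and the ? operator in place of the deprecated try! macro. A minimal standalone sketch of the first two transformations, assuming an illustrative Point type that is not part of Cargo:

// Illustrative example only; Point and its values are not from the Cargo source.
#[derive(Debug, PartialEq)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let x = 1;
    let y = 2;

    // Before the fix: the field name and the local variable are written twice.
    let a = Point { x: x, y: y };
    // After the fix: field init shorthand, since the local names match the fields.
    let b = Point { x, y };

    // Before: assert! reports only "assertion failed", with no values.
    assert!(a == b);
    // After: assert_eq!/assert_ne! print both operands on failure,
    // which is why the compared type must also derive Debug.
    assert_eq!(a, b);
    assert_ne!(b, Point { x: 3, y: 4 });
}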

View File

@ -121,13 +121,13 @@ pub fn execute(mut options: Options, config: &mut Config) -> CliResult {
no_fail_fast: options.flag_no_fail_fast,
only_doc: false,
compile_opts: ops::CompileOptions {
config: config,
config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| &s[..]),
features: &options.flag_features,
all_features: options.flag_all_features,
no_default_features: options.flag_no_default_features,
spec: spec,
spec,
release: true,
mode: ops::CompileMode::Bench,
filter: ops::CompileFilter::new(options.flag_lib,

View File

@ -105,13 +105,13 @@ pub fn execute(options: Options, config: &mut Config) -> CliResult {
&options.flag_package)?;
let opts = CompileOptions {
config: config,
config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,
all_features: options.flag_all_features,
no_default_features: options.flag_no_default_features,
spec: spec,
spec,
mode: ops::CompileMode::Build,
release: options.flag_release,
filter: ops::CompileFilter::new(options.flag_lib,

View File

@ -121,14 +121,14 @@ pub fn execute(options: Options, config: &mut Config) -> CliResult {
};
let opts = CompileOptions {
config: config,
config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,
all_features: options.flag_all_features,
no_default_features: options.flag_no_default_features,
spec: spec,
mode: ops::CompileMode::Check{test:test},
spec,
mode: ops::CompileMode::Check{test },
release: options.flag_release,
filter: ops::CompileFilter::new(options.flag_lib,
&options.flag_bin, options.flag_bins,

View File

@ -56,7 +56,7 @@ pub fn execute(options: Options, config: &mut Config) -> CliResult {
let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
let opts = ops::CleanOptions {
config: config,
config,
spec: &options.flag_package,
target: options.flag_target.as_ref().map(|s| &s[..]),
release: options.flag_release,

View File

@ -98,13 +98,13 @@ pub fn execute(options: Options, config: &mut Config) -> CliResult {
let doc_opts = ops::DocOptions {
open_result: options.flag_open,
compile_opts: ops::CompileOptions {
config: config,
config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,
all_features: options.flag_all_features,
no_default_features: options.flag_no_default_features,
spec: spec,
spec,
filter: ops::CompileFilter::new(options.flag_lib,
&options.flag_bin, options.flag_bins,
&empty, false,

View File

@ -118,7 +118,7 @@ pub fn execute(options: Options, config: &mut Config) -> CliResult {
&options.flag_z)?;
let compile_opts = ops::CompileOptions {
config: config,
config,
jobs: options.flag_jobs,
target: None,
features: &options.flag_features,

View File

@ -54,7 +54,7 @@ pub fn execute(options: Options, config: &mut Config) -> CliResult {
let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
let ws = Workspace::new(&root, config)?;
ops::package(&ws, &ops::PackageOpts {
config: config,
config,
verify: !options.flag_no_verify,
list: options.flag_list,
check_metadata: !options.flag_no_metadata,

View File

@ -97,17 +97,17 @@ about this warning.";
let root = find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd())?;
let ws = Workspace::new(&root, config)?;
ops::publish(&ws, &ops::PublishOpts {
config: config,
token: token,
config,
token,
index:
if host.clone().is_none() || host.clone().unwrap().is_empty() { index }
else { config.shell().warn(&msg)?; host }, // TODO: Deprecated, remove
verify: !no_verify,
allow_dirty: allow_dirty,
allow_dirty,
target: target.as_ref().map(|t| &t[..]),
jobs: jobs,
dry_run: dry_run,
registry: registry,
jobs,
dry_run,
registry,
})?;
Ok(())
}

View File

@ -86,13 +86,13 @@ pub fn execute(options: Options, config: &mut Config) -> CliResult {
let spec = Packages::Packages(&packages);
let compile_opts = ops::CompileOptions {
config: config,
config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,
all_features: options.flag_all_features,
no_default_features: options.flag_no_default_features,
spec: spec,
spec,
release: options.flag_release,
mode: ops::CompileMode::Build,
filter: if examples.is_empty() && bins.is_empty() {

View File

@ -113,14 +113,14 @@ pub fn execute(options: Options, config: &mut Config) -> CliResult {
let spec = options.flag_package.map_or_else(Vec::new, |s| vec![s]);
let opts = CompileOptions {
config: config,
config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,
all_features: options.flag_all_features,
no_default_features: options.flag_no_default_features,
spec: Packages::Packages(&spec),
mode: mode,
mode,
release: options.flag_release,
filter: ops::CompileFilter::new(options.flag_lib,
&options.flag_bin, options.flag_bins,

View File

@ -99,7 +99,7 @@ pub fn execute(options: Options, config: &mut Config) -> CliResult {
let doc_opts = ops::DocOptions {
open_result: options.flag_open,
compile_opts: ops::CompileOptions {
config: config,
config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,

View File

@ -159,16 +159,16 @@ pub fn execute(mut options: Options, config: &mut Config) -> CliResult {
no_fail_fast: options.flag_no_fail_fast,
only_doc: options.flag_doc,
compile_opts: ops::CompileOptions {
config: config,
config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| &s[..]),
features: &options.flag_features,
all_features: options.flag_all_features,
no_default_features: options.flag_no_default_features,
spec: spec,
spec,
release: options.flag_release,
mode: mode,
filter: filter,
mode,
filter,
message_format: options.flag_message_format,
target_rustdoc_args: None,
target_rustc_args: None,

View File

@ -74,7 +74,7 @@ pub fn execute(options: Options, config: &mut Config) -> CliResult {
aggressive: options.flag_aggressive,
precise: options.flag_precise.as_ref().map(|s| &s[..]),
to_update: &options.flag_package,
config: config,
config,
};
let ws = Workspace::new(&root, config)?;

View File

@ -275,22 +275,22 @@ impl Manifest {
im_a_teapot: Option<bool>,
original: Rc<TomlManifest>) -> Manifest {
Manifest {
summary: summary,
targets: targets,
summary,
targets,
warnings: Vec::new(),
exclude: exclude,
include: include,
links: links,
metadata: metadata,
profiles: profiles,
publish: publish,
replace: replace,
patch: patch,
workspace: workspace,
features: features,
epoch: epoch,
original: original,
im_a_teapot: im_a_teapot,
exclude,
include,
links,
metadata,
profiles,
publish,
replace,
patch,
workspace,
features,
epoch,
original,
im_a_teapot,
}
}
@ -372,10 +372,10 @@ impl VirtualManifest {
workspace: WorkspaceConfig,
profiles: Profiles) -> VirtualManifest {
VirtualManifest {
replace: replace,
patch: patch,
workspace: workspace,
profiles: profiles,
replace,
patch,
workspace,
profiles,
}
}
@ -430,7 +430,7 @@ impl Target {
Target {
kind: TargetKind::Bin,
name: name.to_string(),
required_features: required_features,
required_features,
doc: true,
..Target::with_path(src_path)
}
@ -459,9 +459,9 @@ impl Target {
};
Target {
kind: kind,
kind,
name: name.to_string(),
required_features: required_features,
required_features,
benched: false,
..Target::with_path(src_path)
}
@ -472,7 +472,7 @@ impl Target {
Target {
kind: TargetKind::Test,
name: name.to_string(),
required_features: required_features,
required_features,
benched: false,
..Target::with_path(src_path)
}
@ -483,7 +483,7 @@ impl Target {
Target {
kind: TargetKind::Bench,
name: name.to_string(),
required_features: required_features,
required_features,
tested: false,
..Target::with_path(src_path)
}

View File

@ -58,9 +58,9 @@ impl ser::Serialize for Package {
name: package_id.name(),
version: &package_id.version().to_string(),
id: package_id,
license: license,
license_file: license_file,
description: description,
license,
license_file,
description,
source: summary.source_id(),
dependencies: summary.dependencies(),
targets: self.manifest.targets(),
@ -75,7 +75,7 @@ impl Package {
pub fn new(manifest: Manifest,
manifest_path: &Path) -> Package {
Package {
manifest: manifest,
manifest,
manifest_path: manifest_path.to_path_buf(),
}
}

View File

@ -64,8 +64,8 @@ impl<'de> de::Deserialize<'de> for PackageId {
Ok(PackageId {
inner: Arc::new(PackageIdInner {
name: name.to_string(),
version: version,
source_id: source_id,
version,
source_id,
}),
})
}

View File

@ -40,7 +40,7 @@ impl PackageIdSpec {
}
Ok(PackageIdSpec {
name: name.to_string(),
version: version,
version,
url: None,
})
}
@ -100,8 +100,8 @@ impl PackageIdSpec {
}
};
Ok(PackageIdSpec {
name: name,
version: version,
name,
version,
url: Some(url),
})
}

View File

@ -110,7 +110,7 @@ impl<'cfg> PackageRegistry<'cfg> {
sources: SourceMap::new(),
source_ids: HashMap::new(),
overrides: Vec::new(),
source_config: source_config,
source_config,
locked: HashMap::new(),
patches: HashMap::new(),
patches_locked: false,

View File

@ -179,10 +179,10 @@ impl EncodableResolve {
graph: g,
empty_features: HashSet::new(),
features: HashMap::new(),
replacements: replacements,
checksums: checksums,
metadata: metadata,
unused_patches: unused_patches,
replacements,
checksums,
metadata,
unused_patches,
})
}
}
@ -372,8 +372,8 @@ impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
EncodableResolve {
package: Some(encodable),
root: None,
metadata: metadata,
patch: patch,
metadata,
patch,
}.serialize(s)
}
}
@ -399,7 +399,7 @@ fn encodable_resolve_node(id: &PackageId, resolve: &Resolve)
version: id.version().to_string(),
source: encode_source(id.source_id()),
dependencies: deps,
replace: replace,
replace,
}
}

View File

@ -70,9 +70,9 @@ impl SourceId {
fn new(kind: Kind, url: Url) -> CargoResult<SourceId> {
let source_id = SourceId {
inner: Arc::new(SourceIdInner {
kind: kind,
kind,
canonical_url: git::canonicalize_url(&url)?,
url: url,
url,
precise: None,
name: None,
}),
@ -191,7 +191,7 @@ impl SourceId {
inner: Arc::new(SourceIdInner {
kind: Kind::Registry,
canonical_url: git::canonicalize_url(&url)?,
url: url,
url,
precise: None,
name: Some(key.to_string()),
}),
@ -524,6 +524,6 @@ mod tests {
let foo = Kind::Git(GitReference::Branch("foo".to_string()));
let s3 = SourceId::new(foo, loc).unwrap();
assert!(s1 != s3);
assert_ne!(s1, s3);
}
}

View File

@ -125,14 +125,14 @@ impl<'cfg> Workspace<'cfg> {
let target_dir = config.target_dir()?;
let mut ws = Workspace {
config: config,
config,
current_manifest: manifest_path.to_path_buf(),
packages: Packages {
config: config,
config,
packages: HashMap::new(),
},
root_manifest: None,
target_dir: target_dir,
target_dir,
members: Vec::new(),
default_members: Vec::new(),
is_ephemeral: false,
@ -162,10 +162,10 @@ impl<'cfg> Workspace<'cfg> {
target_dir: Option<Filesystem>,
require_optional_deps: bool) -> CargoResult<Workspace<'cfg>> {
let mut ws = Workspace {
config: config,
config,
current_manifest: package.manifest_path().to_path_buf(),
packages: Packages {
config: config,
config,
packages: HashMap::new(),
},
root_manifest: None,
@ -173,7 +173,7 @@ impl<'cfg> Workspace<'cfg> {
members: Vec::new(),
default_members: Vec::new(),
is_ephemeral: true,
require_optional_deps: require_optional_deps,
require_optional_deps,
};
{
let key = ws.current_manifest.parent().unwrap();

View File

@ -216,7 +216,7 @@ pub fn version() -> VersionInfo {
pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
cfg_info: Some(CfgInfo {
release_channel: option_env_str!("CFG_RELEASE_CHANNEL").unwrap(),
commit_info: commit_info,
commit_info,
}),
}
},

View File

@ -70,14 +70,14 @@ impl<'a> CompileOptions<'a> {
pub fn default(config: &'a Config, mode: CompileMode) -> CompileOptions<'a>
{
CompileOptions {
config: config,
config,
jobs: None,
target: None,
features: &[],
all_features: false,
no_default_features: false,
spec: ops::Packages::Packages(&[]),
mode: mode,
mode,
release: false,
filter: CompileFilter::Default { required_features_filterable: false },
message_format: MessageFormat::Human,
@ -455,7 +455,7 @@ fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target],
targets.iter().filter(|t| t.benched()).map(|t| {
BuildProposal {
target: t,
profile: profile,
profile,
required: !required_features_filterable,
}
}).collect::<Vec<_>>()
@ -489,7 +489,7 @@ fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target],
t.is_bin() || t.is_lib()
}).map(|t| BuildProposal {
target: t,
profile: profile,
profile,
required: !required_features_filterable,
}).collect()
}
@ -501,7 +501,7 @@ fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target],
)
}).map(|t| BuildProposal {
target: t,
profile: profile,
profile,
required: !required_features_filterable,
}).collect()
}
@ -510,7 +510,7 @@ fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target],
if t.doctested() {
return vec![BuildProposal {
target: t,
profile: profile,
profile,
required: !required_features_filterable,
}];
}
@ -532,7 +532,7 @@ fn propose_indicated_targets<'a>(pkg: &'a Package,
let result = pkg.targets().iter().filter(|t| is_expected_kind(t)).map(|t| {
BuildProposal {
target: t,
profile: profile,
profile,
required: false,
}
});
@ -561,7 +561,7 @@ fn propose_indicated_targets<'a>(pkg: &'a Package,
debug!("found {} `{}`", desc, name);
targets.push(BuildProposal {
target: t,
profile: profile,
profile,
required: true,
});
}
@ -650,7 +650,7 @@ fn generate_targets<'a>(pkg: &'a Package,
if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
targets.push(BuildProposal {
target: t,
profile: profile,
profile,
required: true,
});
} else if !all_targets {
@ -721,7 +721,7 @@ fn scrape_build_config(config: &Config,
let mut base = ops::BuildConfig {
host_triple: config.rustc()?.host.clone(),
requested_target: target.clone(),
jobs: jobs,
jobs,
..Default::default()
};
base.host = scrape_target_config(config, &base.host_triple)?;

View File

@ -291,7 +291,7 @@ pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> {
let mkopts = MkOptions {
version_control: opts.version_control,
path: &path,
name: name,
name,
source_files: vec![plan_new_source_file(opts.kind.is_bin(), name.to_string())],
bin: opts.kind.is_bin(),
};
@ -580,8 +580,8 @@ fn global_config(config: &Config) -> CargoResult<CargoNewConfig> {
None => None
};
Ok(CargoNewConfig {
name: name,
email: email,
name,
email,
version_control: vcs,
})
}

View File

@ -59,10 +59,10 @@ fn metadata_full(ws: &Workspace,
.collect::<CargoResult<Vec<_>>>()?;
Ok(ExportInfo {
packages: packages,
packages,
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
resolve: Some(MetadataResolve{
resolve: resolve,
resolve,
root: ws.current_opt().map(|pkg| pkg.package_id().clone()),
}),
target_directory: ws.target_dir().display().to_string(),
@ -102,7 +102,7 @@ fn serialize_resolve<S>(resolve: &Resolve, s: S) -> Result<S::Ok, S::Error>
resolve.iter().map(|id| {
Node {
id: id,
id,
dependencies: resolve.deps(id).collect(),
}
}).collect::<Vec<_>>().serialize(s)

View File

@ -292,7 +292,7 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult
let ws = Workspace::ephemeral(new_pkg, config, None, true)?;
ops::compile_ws(&ws, None, &ops::CompileOptions {
config: config,
config,
jobs: opts.jobs,
target: opts.target,
features: &[],

View File

@ -80,7 +80,7 @@ impl<'cfg> Compilation<'cfg> {
to_doc_test: Vec::new(),
cfgs: HashMap::new(),
rustdocflags: HashMap::new(),
config: config,
config,
target: String::new(),
target_runner: LazyCell::new(),
}

View File

@ -175,26 +175,26 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
};
Ok(Context {
ws: ws,
ws,
host: host_layout,
target: target_layout,
resolve: resolve,
packages: packages,
config: config,
resolve,
packages,
config,
target_info: TargetInfo::default(),
host_info: TargetInfo::default(),
compilation: Compilation::new(config),
build_state: Arc::new(BuildState::new(&build_config)),
build_config: build_config,
build_config,
fingerprints: HashMap::new(),
profiles: profiles,
profiles,
compiled: HashSet::new(),
build_scripts: HashMap::new(),
build_explicit_deps: HashMap::new(),
links: Links::new(),
used_in_plugin: HashSet::new(),
incremental_env,
jobserver: jobserver,
jobserver,
build_script_overridden: HashSet::new(),
// TODO: Pre-Calculate these with a topo-sort, rather than lazy-calculating
@ -346,7 +346,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}
let cfg = if has_cfg_and_sysroot {
Some(try!(lines.map(Cfg::from_str).collect()))
Some(lines.map(Cfg::from_str).collect::<CargoResult<_>>()?)
} else {
None
};
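The hunk above also swaps the deprecated try! macro for the ? operator; because ? is applied directly to the collect call, the target type is now spelled out with a ::<CargoResult<_>> turbofish instead of being inferred. A minimal sketch of the same rewrite, assuming an illustrative parse_all helper and a ParseResult alias that are not part of Cargo:

use std::num::ParseIntError;

// Illustrative stand-in for Cargo's CargoResult alias.
type ParseResult<T> = Result<T, ParseIntError>;

fn parse_all(lines: &[&str]) -> ParseResult<Vec<i32>> {
    // Old style: try! expanded to a match that returned early on Err.
    // let nums: Vec<i32> = try!(lines.iter().map(|l| l.parse()).collect());

    // New style: the ? operator, with a turbofish so collect knows its target type.
    let nums = lines.iter().map(|l| l.parse()).collect::<ParseResult<Vec<i32>>>()?;
    Ok(nums)
}

fn main() {
    assert_eq!(parse_all(&["1", "2"]).unwrap(), vec![1, 2]);
    assert!(parse_all(&["not a number"]).is_err());
}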
@ -819,7 +819,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
Ok(pkg) => {
pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
let unit = Unit {
pkg: pkg,
pkg,
target: t,
profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t),

View File

@ -331,7 +331,7 @@ impl BuildState {
}
BuildState {
outputs: Mutex::new(HashMap::new()),
overrides: overrides,
overrides,
}
}
@ -416,14 +416,14 @@ impl BuildOutput {
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
cfgs: cfgs,
env: env,
metadata: metadata,
rerun_if_changed: rerun_if_changed,
rerun_if_env_changed: rerun_if_env_changed,
warnings: warnings,
library_paths,
library_links,
cfgs,
env,
metadata,
rerun_if_changed,
rerun_if_env_changed,
warnings,
})
}

View File

@ -419,7 +419,7 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// actually affect the output artifact so there's no need to hash it.
path: util::hash_u64(&super::path_args(cx, unit).0),
features: format!("{:?}", cx.resolve.features_sorted(unit.pkg.package_id())),
deps: deps,
deps,
local: vec![local],
memoized_hash: Mutex::new(None),
epoch: unit.pkg.manifest().epoch(),
@ -473,7 +473,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
path: 0,
features: String::new(),
deps: Vec::new(),
local: local,
local,
memoized_hash: Mutex::new(None),
epoch: Epoch::Epoch2015,
rustflags: Vec::new(),

View File

@ -47,7 +47,7 @@ impl Work {
impl Job {
/// Create a new job representing a unit of work.
pub fn new(dirty: Work, fresh: Work) -> Job {
Job { dirty: dirty, fresh: fresh }
Job { dirty, fresh }
}
/// Consumes this job by running it, returning the result of the

View File

@ -81,8 +81,8 @@ impl<'a> JobQueue<'a> {
let (tx, rx) = channel();
JobQueue {
queue: DependencyQueue::new(),
tx: tx,
rx: rx,
tx,
rx,
active: 0,
pending: HashMap::new(),
compiled: HashSet::new(),

View File

@ -117,7 +117,7 @@ impl Layout {
incremental: root.join("incremental"),
fingerprint: root.join(".fingerprint"),
examples: root.join("examples"),
root: root,
root,
_lock: lock,
})
}

View File

@ -149,9 +149,9 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
};
targets.iter().map(move |&(target, profile)| {
Unit {
pkg: pkg,
target: target,
profile: profile,
pkg,
target,
profile,
kind: if target.for_host() {Kind::Host} else {default_kind},
}
})
@ -576,9 +576,9 @@ fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
package_id: &package_id,
target: &target,
profile: &profile,
features: features,
features,
filenames: destinations,
fresh: fresh,
fresh,
});
}
Ok(())

View File

@ -180,7 +180,7 @@ fn transmit(config: &Config,
let publish = registry.publish(&NewCrate {
name: pkg.name().to_string(),
vers: pkg.version().to_string(),
deps: deps,
deps,
features: pkg.summary().features().clone(),
authors: authors.clone(),
description: description.clone(),
@ -241,8 +241,8 @@ pub fn registry_configuration(config: &Config,
};
Ok(RegistryConfig {
index: index,
token: token
index,
token
})
}

View File

@ -234,7 +234,7 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
// for any other packages specified with `-p`.
Method::Required { dev_deps, .. } => {
let base = Method::Required {
dev_deps: dev_deps,
dev_deps,
features: &[],
uses_default_features: true,
};

View File

@ -56,7 +56,7 @@ impl<'cfg> SourceConfigMap<'cfg> {
let mut base = SourceConfigMap {
cfgs: HashMap::new(),
id2name: HashMap::new(),
config: config,
config,
};
base.add("crates-io", SourceConfig {
id: SourceId::crates_io(config)?,
@ -208,7 +208,7 @@ restore the source replacement configuration to continue the build
self.add(name, SourceConfig {
id: src,
replace_with: replace_with,
replace_with,
});
return Ok(());

View File

@ -33,7 +33,7 @@ impl<'cfg> DirectorySource<'cfg> {
DirectorySource {
source_id: id.clone(),
root: path.to_path_buf(),
config: config,
config,
packages: HashMap::new(),
}
}

View File

@ -37,13 +37,13 @@ impl<'cfg> GitSource<'cfg> {
};
let source = GitSource {
remote: remote,
reference: reference,
remote,
reference,
source_id: source_id.clone(),
path_source: None,
rev: None,
ident: ident,
config: config,
ident,
config,
};
Ok(source)

View File

@ -109,7 +109,7 @@ impl GitRemote {
Ok(GitDatabase {
remote: self.clone(),
path: into.to_path_buf(),
repo: repo,
repo,
})
}
@ -118,7 +118,7 @@ impl GitRemote {
Ok(GitDatabase {
remote: self.clone(),
path: db_path.to_path_buf(),
repo: repo,
repo,
})
}
@ -210,9 +210,9 @@ impl<'a> GitCheckout<'a> {
{
GitCheckout {
location: path.to_path_buf(),
database: database,
revision: revision,
repo: repo,
database,
revision,
repo,
}
}

View File

@ -34,7 +34,7 @@ impl<'cfg> PathSource<'cfg> {
path: path.to_path_buf(),
updated: false,
packages: Vec::new(),
config: config,
config,
recursive: false,
}
}

View File

@ -31,8 +31,8 @@ impl<'cfg> RegistryIndex<'cfg> {
path: path.clone(),
cache: HashMap::new(),
hashes: HashMap::new(),
config: config,
locked: locked,
config,
locked,
}
}

View File

@ -25,7 +25,7 @@ impl<'cfg> LocalRegistry<'cfg> {
src_path: config.registry_source_path().join(name),
index_path: Filesystem::new(root.join("index")),
root: Filesystem::new(root.to_path_buf()),
config: config,
config,
}
}
}

View File

@ -288,15 +288,15 @@ impl<'cfg> RegistrySource<'cfg> {
index_locked: bool) -> RegistrySource<'cfg> {
RegistrySource {
src_path: config.registry_source_path().join(name),
config: config,
config,
source_id: source_id.clone(),
updated: false,
index: index::RegistryIndex::new(source_id,
ops.index_path(),
config,
index_locked),
index_locked: index_locked,
ops: ops,
index_locked,
ops,
}
}

View File

@ -36,7 +36,7 @@ impl<'cfg> RemoteRegistry<'cfg> {
index_path: config.registry_index_path().join(name),
cache_path: config.registry_cache_path().join(name),
source_id: source_id.clone(),
config: config,
config,
tree: RefCell::new(None),
repo: LazyCell::new(),
head: Cell::new(None),

View File

@ -84,7 +84,7 @@ impl Config {
home_path: Filesystem::new(homedir),
shell: RefCell::new(shell),
rustc: LazyCell::new(),
cwd: cwd,
cwd,
values: LazyCell::new(),
cargo_exe: LazyCell::new(),
rustdoc: LazyCell::new(),

View File

@ -104,8 +104,8 @@ impl CargoTestError {
.collect::<Vec<String>>()
.join("\n");
CargoTestError {
test: test,
desc: desc,
test,
desc,
exit: errors[0].exit,
causes: errors,
}
@ -201,7 +201,7 @@ pub fn process_error(msg: &str,
}
return ProcessError {
desc: desc,
desc,
exit: status.cloned(),
output: output.cloned(),
};

View File

@ -17,7 +17,7 @@ pub struct FileLock {
state: State,
}
#[derive(PartialEq)]
#[derive(PartialEq, Debug)]
enum State {
Unlocked,
Shared,
@ -35,13 +35,13 @@ impl FileLock {
/// Note that special care must be taken to ensure that the path is not
/// referenced outside the lifetime of this lock.
pub fn path(&self) -> &Path {
assert!(self.state != State::Unlocked);
assert_ne!(self.state, State::Unlocked);
&self.path
}
/// Returns the parent path containing this file
pub fn parent(&self) -> &Path {
assert!(self.state != State::Unlocked);
assert_ne!(self.state, State::Unlocked);
self.path.parent().unwrap()
}
@ -229,7 +229,7 @@ impl Filesystem {
State::Unlocked => {}
}
Ok(FileLock { f: Some(f), path: path, state: state })
Ok(FileLock { f: Some(f), path, state })
}
}
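Alongside the macro change in this file, the State enum gains a Debug derive: unlike assert!, the assert_ne! macro formats both operands when the assertion fails, so the compared type must implement Debug in addition to PartialEq. A minimal sketch of that requirement, using an illustrative trimmed-down State enum rather than Cargo's own:

#[derive(PartialEq, Debug)] // Debug lets assert_ne! print the left and right values on failure
enum State {
    Unlocked,
    Shared,
}

fn main() {
    let state = State::Shared;
    // assert!(state != State::Unlocked) would compile with PartialEq alone,
    // but assert_ne! additionally needs Debug for its failure message.
    assert_ne!(state, State::Unlocked);
}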

View File

@ -114,7 +114,7 @@ mod imp {
return None
}
Some(Setup { job: job })
Some(Setup { job })
}
impl Drop for Setup {

View File

@ -244,9 +244,9 @@ impl ProcessBuilder {
None)
})?;
let output = Output {
stdout: stdout,
stderr: stderr,
status: status,
stdout,
stderr,
status,
};
{

View File

@ -137,7 +137,7 @@ mod imp {
impl<'a> Pipe<'a> {
unsafe fn new<P: IntoRawHandle>(p: P, dst: &'a mut Vec<u8>) -> Pipe<'a> {
Pipe {
dst: dst,
dst,
pipe: NamedPipe::from_raw_handle(p.into_raw_handle()),
overlapped: Overlapped::zero(),
done: false,

View File

@ -40,10 +40,10 @@ impl Rustc {
};
Ok(Rustc {
path: path,
wrapper: wrapper,
verbose_version: verbose_version,
host: host,
path,
wrapper,
verbose_version,
host,
})
}

View File

@ -301,6 +301,12 @@ impl<'de> de::Deserialize<'de> for U32OrBool {
formatter.write_str("a boolean or an integer")
}
fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
where E: de::Error,
{
Ok(U32OrBool::Bool(b))
}
fn visit_i64<E>(self, u: i64) -> Result<Self::Value, E>
where E: de::Error,
{
@ -312,12 +318,6 @@ impl<'de> de::Deserialize<'de> for U32OrBool {
{
Ok(U32OrBool::U32(u as u32))
}
fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
where E: de::Error,
{
Ok(U32OrBool::Bool(b))
}
}
deserializer.deserialize_any(Visitor)
@ -361,17 +361,17 @@ impl<'de> de::Deserialize<'de> for StringOrBool {
formatter.write_str("a boolean or a string")
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where E: de::Error,
{
Ok(StringOrBool::String(s.to_string()))
}
fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
where E: de::Error,
{
Ok(StringOrBool::Bool(b))
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where E: de::Error,
{
Ok(StringOrBool::String(s.to_string()))
}
}
deserializer.deserialize_any(Visitor)
@ -604,9 +604,9 @@ impl TomlManifest {
let mut cx = Context {
pkgid: Some(&pkgid),
deps: &mut deps,
source_id: source_id,
source_id,
nested_paths: &mut nested_paths,
config: config,
config,
warnings: &mut warnings,
features: &features,
platform: None,
@ -800,13 +800,13 @@ impl TomlManifest {
let mut cx = Context {
pkgid: None,
deps: &mut deps,
source_id: source_id,
source_id,
nested_paths: &mut nested_paths,
config: config,
config,
warnings: &mut warnings,
platform: None,
features: &features,
root: root
root
};
(me.replace(&mut cx)?, me.patch(&mut cx)?)
};
@ -1207,7 +1207,7 @@ fn build_profiles(profiles: &Option<TomlProfiles>) -> Profiles {
Some(StringOrBool::String(ref n)) => Lto::Named(n.clone()),
None => profile.lto,
},
codegen_units: codegen_units,
codegen_units,
rustc_args: None,
rustdoc_args: None,
debuginfo: debug.unwrap_or(profile.debuginfo),

View File

@ -103,9 +103,9 @@ impl Registry {
token: Option<String>,
handle: Easy) -> Registry {
Registry {
host: host,
token: token,
handle: handle,
host,
token,
handle,
}
}
@ -197,8 +197,8 @@ impl Registry {
.unwrap_or_else(Vec::new);
Ok(Warnings {
invalid_categories: invalid_categories,
invalid_badges: invalid_badges,
invalid_categories,
invalid_badges,
})
}

View File

@ -27,7 +27,7 @@ impl RepoBuilder {
t!(config.set_str("user.name", "name"));
t!(config.set_str("user.email", "email"));
}
RepoBuilder { repo: repo, files: Vec::new() }
RepoBuilder { repo, files: Vec::new() }
}
pub fn file(self, path: &str, contents: &str) -> RepoBuilder {

View File

@ -45,7 +45,7 @@ struct FileBuilder {
impl FileBuilder {
pub fn new(path: PathBuf, body: &str) -> FileBuilder {
FileBuilder { path: path, body: body.to_string() }
FileBuilder { path, body: body.to_string() }
}
fn mk(&self) {
@ -71,7 +71,7 @@ struct SymlinkBuilder {
impl SymlinkBuilder {
pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {
SymlinkBuilder { dst: dst, src: src }
SymlinkBuilder { dst, src }
}
#[cfg(unix)]
@ -699,7 +699,7 @@ fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value)
assert!(r.len() > 0);
Some((&l[0], &r[0]))
} else {
assert!(r.len() == 0);
assert_eq!(r.len(), 0);
None
}
}

View File

@ -67,7 +67,7 @@ pub trait CargoPathExt {
self.move_in_time(|sec, nsec| (sec + 3600, nsec))
}
fn move_in_time<F>(&self, F)
fn move_in_time<F>(&self, travel_amount: F)
where F: Fn(u64, u32) -> (u64, u32);
}

View File

@ -602,7 +602,7 @@ fn git_lock_file_doesnt_change() {
let mut lock2 = String::new();
t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lock2));
assert!(lock1 == lock2, "lock files changed");
assert_eq!(lock1, lock2, "lock files changed");
}
#[test]

View File

@ -42,7 +42,7 @@ fn adding_and_removing_packages() {
assert_that(p.cargo("generate-lockfile"),
execs().with_status(0));
let lock2 = p.read_lockfile();
assert!(lock1 != lock2);
assert_ne!(lock1, lock2);
// change the dep
File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#"
@ -54,8 +54,8 @@ fn adding_and_removing_packages() {
assert_that(p.cargo("generate-lockfile"),
execs().with_status(0));
let lock3 = p.read_lockfile();
assert!(lock1 != lock3);
assert!(lock2 != lock3);
assert_ne!(lock1, lock3);
assert_ne!(lock2, lock3);
// remove the dep
println!("lock4");

View File

@ -901,7 +901,7 @@ fn use_path_workspace() {
let lock = p.read_lockfile();
assert_that(p.cargo("install"), execs().with_status(0));
let lock2 = p.read_lockfile();
assert!(lock == lock2, "different lockfiles");
assert_eq!(lock, lock2, "different lockfiles");
}
#[test]

View File

@ -681,7 +681,7 @@ fn remove_patch() {
assert!(lock_file1.contains("bar"));
assert_eq!(lock_file2, lock_file3);
assert!(lock_file1 != lock_file2);
assert_ne!(lock_file1, lock_file2);
}
#[test]