Update to rust style guidelines

* 80 char line limit highly recommended
* /// and //! for doc comments
Alex Crichton 2014-06-19 16:45:19 -07:00
parent 608133376a
commit 64ff29ff86
35 changed files with 570 additions and 324 deletions
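
The two conventions named above are what the diff below applies across the tree: block doc comments become line doc comments (/// on items, //! at module level), and overlong lines are wrapped, with 80 columns recommended in the message while the new Travis check only rejects lines over 100. A small illustrative sketch of the target style, using made-up names that are not taken from this commit:

//! Module-level documentation uses //! line comments rather than
//! /*! ... */ blocks.

/// Item-level documentation uses /// rather than /** ... */.
///
/// Signatures that would run past the limit are wrapped, with the
/// continuation parameters aligned under the first one.
fn describe(name: &str, version: &str,
            namespace: &str) -> String {
    format!("{} {} ({})", name, version, namespace)
}

/// Long closure chains put the closure body on its own lines inside
/// braces instead of one overlong line.
fn greet_all(names: &[String]) -> Vec<String> {
    names.iter().map(|name| {
        format!("Hello, {}!", name)
    }).collect()
}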

.travis.check.style.sh Executable file
View File

@ -0,0 +1,3 @@
echo "checking for lines over 100 characters..."
find src tests -name '*.rs' | xargs grep '.\{101,\}' && exit 1
echo "ok"

View File

@ -4,6 +4,7 @@ install:
- . ./.travis.install.deps.sh
script:
- ./.travis.check.style.sh
- make CC="$CC" RUSTC="$RUSTC" -j4
- make CC="$CC" RUSTC="$RUSTC" test -j4

View File

@ -33,9 +33,14 @@ fn execute(options: Options) -> CliResult<Option<()>> {
Some(path) => Path::new(path),
None => try!(find_project(os::getcwd(), "Cargo.toml")
.map(|path| path.join("Cargo.toml"))
.map_err(|_|
CliError::new("Could not find Cargo.toml in this directory or any parent directory", 102)))
.map_err(|_| {
CliError::new("Could not find Cargo.toml in this \
directory or any parent directory",
102)
}))
};
ops::compile(&root).map(|_| None).map_err(|err| CliError::from_boxed(err, 101))
ops::compile(&root).map(|_| None).map_err(|err| {
CliError::from_boxed(err, 101)
})
}

View File

@ -29,14 +29,18 @@ fn execute(options: Options) -> CliResult<Option<()>> {
let Options { url, reference, .. } = options;
let url: Url = try!(from_str(url.as_slice())
.require(|| human(format!("The URL `{}` you passed was not a valid URL", url)))
.require(|| human(format!("The URL `{}` you passed was \
not a valid URL", url)))
.map_err(|e| CliError::from_boxed(e, 1)));
let source_id = SourceId::for_git(&url, reference.as_slice());
let mut source = GitSource::new(&source_id, &try!(Config::new().map_err(|e| CliError::from_boxed(e, 1))));
let config = try!(Config::new().map_err(|e| CliError::from_boxed(e, 1)));
let mut source = GitSource::new(&source_id, &config);
try!(source.update().map_err(|e| CliError::new(format!("Couldn't update {}: {}", source, e), 1)));
try!(source.update().map_err(|e| {
CliError::new(format!("Couldn't update {}: {}", source, e), 1)
}));
Ok(None)
}

View File

@ -22,7 +22,8 @@ fn main() {
}
fn execute(options: Options) -> CliResult<Option<Package>> {
let source_id = SourceId::for_path(&Path::new(options.manifest_path.as_slice()));
let path = Path::new(options.manifest_path.as_slice());
let source_id = SourceId::for_path(&path);
let mut source = PathSource::new(&source_id);
try!(source.update().map_err(|err| CliError::new(err.description(), 1)));

View File

@ -59,7 +59,9 @@ fn execute() {
match command {
Ok(ExitStatus(0)) => (),
Ok(ExitStatus(i)) | Ok(ExitSignal(i)) => handle_error(CliError::new("", i as uint), false),
Ok(ExitStatus(i)) | Ok(ExitSignal(i)) => {
handle_error(CliError::new("", i as uint), false)
}
Err(_) => handle_error(CliError::new("No such subcommand", 127), false)
}
}
@ -67,7 +69,9 @@ fn execute() {
fn process(args: Vec<String>) -> CliResult<(String, Vec<String>)> {
let args: Vec<String> = Vec::from_slice(args.tail());
let head = try!(args.iter().nth(0).require(|| human("No subcommand found")).map_err(|err| CliError::from_boxed(err, 1))).to_str();
let head = try!(args.iter().nth(0).require(|| {
human("No subcommand found")
}).map_err(|err| CliError::from_boxed(err, 1))).to_str();
let tail = Vec::from_slice(args.tail());
Ok((head, tail))
@ -85,14 +89,17 @@ struct ConfigForKeyFlags {
}
impl FlagConfig for ConfigForKeyFlags {
fn config(_: Option<ConfigForKeyFlags>, config: FlagConfiguration) -> FlagConfiguration {
fn config(_: Option<ConfigForKeyFlags>,
config: FlagConfiguration) -> FlagConfiguration {
config.short("human", 'h')
}
}
fn config_for_key(args: ConfigForKeyFlags) -> CliResult<Option<ConfigOut>> {
let value = try!(config::get_config(os::getcwd(), args.key.as_slice()).map_err(|_|
CliError::new("Couldn't load configuration", 1)));
let value = try!(config::get_config(os::getcwd(),
args.key.as_slice()).map_err(|_| {
CliError::new("Couldn't load configuration", 1)
}));
if args.human {
println!("{}", value);
@ -110,7 +117,8 @@ struct ConfigListFlags {
}
impl FlagConfig for ConfigListFlags {
fn config(_: Option<ConfigListFlags>, config: FlagConfiguration) -> FlagConfiguration {
fn config(_: Option<ConfigListFlags>,
config: FlagConfiguration) -> FlagConfiguration {
config.short("human", 'h')
}
}
@ -130,10 +138,13 @@ fn config_list(args: ConfigListFlags) -> CliResult<Option<ConfigOut>> {
}
fn locate_project(_: NoFlags) -> CliResult<Option<ProjectLocation>> {
let root = try!(find_project(os::getcwd(), "Cargo.toml").map_err(|e| CliError::from_boxed(e, 1)));
let root = try!(find_project(os::getcwd(), "Cargo.toml").map_err(|e| {
CliError::from_boxed(e, 1)
}));
let string = try!(root.as_str()
.require(|| human("Your project path contains characters not representable in Unicode"))
.require(|| human("Your project path contains characters \
not representable in Unicode"))
.map_err(|e| CliError::from_boxed(e, 1)));
Ok(Some(ProjectLocation { root: string.to_str() }))

View File

@ -10,7 +10,8 @@ pub struct Dependency {
}
impl Dependency {
pub fn new(name: &str, req: &VersionReq, namespace: &SourceId) -> Dependency {
pub fn new(name: &str, req: &VersionReq,
namespace: &SourceId) -> Dependency {
Dependency {
name: name.to_str(),
namespace: namespace.clone(),
@ -18,7 +19,8 @@ impl Dependency {
}
}
pub fn parse(name: &str, version: &str, namespace: &SourceId) -> CargoResult<Dependency> {
pub fn parse(name: &str, version: &str,
namespace: &SourceId) -> CargoResult<Dependency> {
Ok(Dependency {
name: name.to_str(),
namespace: namespace.clone(),
@ -26,7 +28,8 @@ impl Dependency {
})
}
pub fn exact(name: &str, version: &Version, namespace: &SourceId) -> Dependency {
pub fn exact(name: &str, version: &Version,
namespace: &SourceId) -> Dependency {
Dependency {
name: name.to_str(),
namespace: namespace.clone(),

View File

@ -30,7 +30,8 @@ pub struct CLIError {
}
impl CLIError {
pub fn new<T: Show, U: Show>(msg: T, detail: Option<U>, exit_code: uint) -> CLIError {
pub fn new<T: Show, U: Show>(msg: T, detail: Option<U>,
exit_code: uint) -> CLIError {
let detail = detail.map(|d| d.to_str());
CLIError { msg: msg.to_str(), detail: detail, exit_code: exit_code }
}
@ -58,7 +59,8 @@ impl Show for InternalError {
write!(f, "Couldn't convert `{}` into {}", string, type_name)
},
&MissingManifest(ref path, ref file) => {
write!(f, "Couldn't find a {} in the project (`{}` or any parent directory", file, path.display())
write!(f, "Couldn't find a {} in the project (`{}` or any \
parent directory", file, path.display())
},
&WrappedIoError(ref io_error) => {
write!(f, "{}", io_error)
@ -72,7 +74,8 @@ impl Show for InternalError {
}
impl CargoError {
pub fn cli(msg: String, detail: Option<String>, exit_code: uint) -> CargoError {
pub fn cli(msg: String, detail: Option<String>,
exit_code: uint) -> CargoError {
CargoCLIError(CLIError::new(msg, detail, exit_code))
}

View File

@ -47,7 +47,9 @@ impl<E, S: Encoder<E>> Encodable<S, E> for Manifest {
SerializedManifest {
name: self.summary.get_name().to_str(),
version: self.summary.get_version().to_str(),
dependencies: self.summary.get_dependencies().iter().map(|d| SerializedDependency::from_dependency(d)).collect(),
dependencies: self.summary.get_dependencies().iter().map(|d| {
SerializedDependency::from_dependency(d)
}).collect(),
authors: self.authors.clone(),
targets: self.targets.clone(),
target_dir: self.target_dir.display().to_str(),
@ -71,7 +73,8 @@ impl LibKind {
"rlib" => Ok(Rlib),
"dylib" => Ok(Dylib),
"staticlib" => Ok(StaticLib),
_ => Err(human(format!("{} was not one of lib|rlib|dylib|staticlib", string)))
_ => Err(human(format!("{} was not one of lib|rlib|dylib|staticlib",
string)))
}
}
@ -126,7 +129,8 @@ impl<E, S: Encoder<E>> Encodable<S, E> for Target {
impl Show for Target {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}(name={}, path={})", self.kind, self.name, self.path.display())
write!(f, "{}(name={}, path={})", self.kind, self.name,
self.path.display())
}
}
@ -186,7 +190,8 @@ impl Manifest {
}
impl Target {
pub fn lib_target(name: &str, crate_targets: Vec<LibKind>, path: &Path) -> Target {
pub fn lib_target(name: &str, crate_targets: Vec<LibKind>,
path: &Path) -> Target {
Target {
kind: LibTarget(crate_targets),
name: name.to_str(),

View File

@ -44,7 +44,9 @@ impl<E, S: Encoder<E>> Encodable<S, E> for Package {
SerializedPackage {
name: package_id.get_name().to_str(),
version: package_id.get_version().to_str(),
dependencies: summary.get_dependencies().iter().map(|d| SerializedDependency::from_dependency(d)).collect(),
dependencies: summary.get_dependencies().iter().map(|d| {
SerializedDependency::from_dependency(d)
}).collect(),
authors: Vec::from_slice(manifest.get_authors()),
targets: Vec::from_slice(manifest.get_targets()),
manifest_path: self.manifest_path.display().to_str()
@ -124,7 +126,8 @@ pub struct PackageSet {
impl PackageSet {
pub fn new(packages: &[Package]) -> PackageSet {
//assert!(packages.len() > 0, "PackageSet must be created with at least one package")
//assert!(packages.len() > 0,
// "PackageSet must be created with at least one package")
PackageSet { packages: Vec::from_slice(packages) }
}
@ -136,9 +139,7 @@ impl PackageSet {
self.packages.pop().unwrap()
}
/**
* Get a package by name out of the set
*/
/// Get a package by name out of the set
pub fn get<'a>(&'a self, name: &str) -> &'a Package {
self.packages.iter().find(|pkg| name == pkg.get_name()).unwrap()
}
@ -164,7 +165,9 @@ impl PackageSet {
graph.add(pkg.get_name(), deps.as_slice());
}
let pkgs = some!(graph.sort()).iter().map(|name| self.get(*name).clone()).collect();
let pkgs = some!(graph.sort()).iter().map(|name| {
self.get(*name).clone()
}).collect();
Some(PackageSet {
packages: pkgs

View File

@ -56,7 +56,8 @@ pub struct PackageId {
}
impl PackageId {
pub fn new<T: ToVersion, U: ToUrl>(name: &str, version: T, namespace: U) -> PackageId {
pub fn new<T: ToVersion, U: ToUrl>(name: &str, version: T,
namespace: U) -> PackageId {
PackageId {
name: name.to_str(),
version: version.to_version().unwrap(),
@ -104,6 +105,7 @@ impl<E, D: Decoder<E>> Decodable<D,E> for PackageId {
impl<E, S: Encoder<E>> Encodable<S,E> for PackageId {
fn encode(&self, e: &mut S) -> Result<(), E> {
(vec!(self.name.clone(), self.version.to_str()), self.namespace.to_str()).encode(e)
(vec!(self.name.clone(), self.version.to_str()),
self.namespace.to_str()).encode(e)
}
}

View File

@ -23,7 +23,8 @@ pub struct PackageRegistry {
}
impl PackageRegistry {
pub fn new(source_ids: Vec<SourceId>, override_ids: Vec<SourceId>) -> CargoResult<PackageRegistry> {
pub fn new(source_ids: Vec<SourceId>,
override_ids: Vec<SourceId>) -> CargoResult<PackageRegistry> {
let mut reg = PackageRegistry::empty();
for id in source_ids.iter() {
@ -47,9 +48,11 @@ impl PackageRegistry {
}
pub fn get(&self, package_ids: &[PackageId]) -> CargoResult<Vec<Package>> {
log!(5, "getting packags; sources={}; ids={}", self.sources.len(), package_ids);
log!(5, "getting packags; sources={}; ids={}", self.sources.len(),
package_ids);
// TODO: Only call source with package ID if the package came from the source
// TODO: Only call source with package ID if the package came from the
// source
let mut ret = Vec::new();
for source in self.sources.iter() {
@ -60,7 +63,8 @@ impl PackageRegistry {
}
// TODO: Return earlier if fail
assert!(package_ids.len() == ret.len(), "could not get packages from registry; ids={}", package_ids);
assert!(package_ids.len() == ret.len(),
"could not get packages from registry; ids={}", package_ids);
Ok(ret)
}
@ -71,9 +75,10 @@ impl PackageRegistry {
Ok(())
}
fn load(&mut self, namespace: &SourceId, override: bool) -> CargoResult<()> {
fn load(&mut self, namespace: &SourceId,
override: bool) -> CargoResult<()> {
let mut source = namespace.load(&try!(Config::new()));
let dst = if override { &mut self.overrides } else { &mut self.summaries };
let dst = if override {&mut self.overrides} else {&mut self.summaries};
// Ensure the source has fetched all necessary remote data.
try!(source.update());

View File

@ -12,7 +12,8 @@ use util::errors::CargoResult;
* - The correct input here is not a registry. Resolves should be performable
* on package summaries vs. the packages themselves.
*/
pub fn resolve<R: Registry>(deps: &[Dependency], registry: &mut R) -> CargoResult<Vec<PackageId>> {
pub fn resolve<R: Registry>(deps: &[Dependency],
registry: &mut R) -> CargoResult<Vec<PackageId>> {
log!(5, "resolve; deps={}", deps);
let mut remaining = Vec::from_slice(deps);
@ -22,7 +23,9 @@ pub fn resolve<R: Registry>(deps: &[Dependency], registry: &mut R) -> CargoResul
let curr = match remaining.pop() {
Some(curr) => curr,
None => {
let ret = resolve.values().map(|summary| summary.get_package_id().clone()).collect();
let ret = resolve.values().map(|summary| {
summary.get_package_id().clone()
}).collect();
log!(5, "resolve complete; ret={}", ret);
return Ok(ret);
}
@ -73,23 +76,31 @@ mod test {
macro_rules! pkg(
($name:expr => $($deps:expr),+) => (
{
let source_id = SourceId::new(RegistryKind, url::from_str("http://example.com").unwrap());
let d: Vec<Dependency> = vec!($($deps),+).iter().map(|s| Dependency::parse(*s, "1.0.0", &source_id).unwrap()).collect();
Summary::new(&PackageId::new($name, "1.0.0", "http://www.example.com/"), d.as_slice())
let url = url::from_str("http://example.com").unwrap();
let source_id = SourceId::new(RegistryKind, url);
let d: Vec<Dependency> = vec!($($deps),+).iter().map(|s| {
Dependency::parse(*s, "1.0.0", &source_id).unwrap()
}).collect();
Summary::new(&PackageId::new($name, "1.0.0",
"http://www.example.com/"),
d.as_slice())
}
);
($name:expr) => (
Summary::new(&PackageId::new($name, "1.0.0", "http://www.example.com/"), [])
Summary::new(&PackageId::new($name, "1.0.0",
"http://www.example.com/"), [])
)
)
fn pkg(name: &str) -> Summary {
Summary::new(&PackageId::new(name, "1.0.0", "http://www.example.com/"), &[])
Summary::new(&PackageId::new(name, "1.0.0", "http://www.example.com/"),
&[])
}
fn dep(name: &str) -> Dependency {
let source_id = SourceId::new(RegistryKind, url::from_str("http://example.com").unwrap());
let url = url::from_str("http://example.com").unwrap();
let source_id = SourceId::new(RegistryKind, url);
Dependency::parse(name, "1.0.0", &source_id).unwrap()
}

View File

@ -24,13 +24,17 @@ impl<T: Writer + Send> Shell<T> {
pub fn create(out: T, config: ShellConfig) -> Option<Shell<T>> {
if config.tty && config.color {
let term: Option<term::TerminfoTerminal<T>> = Terminal::new(out);
term.map(|t| Shell { terminal: Color(box t as Box<Terminal<T>>), config: config })
term.map(|t| Shell {
terminal: Color(box t as Box<Terminal<T>>),
config: config
})
} else {
Some(Shell { terminal: NoColor(out), config: config })
}
}
pub fn verbose(&mut self, callback: |&mut Shell<T>| -> IoResult<()>) -> IoResult<()> {
pub fn verbose(&mut self,
callback: |&mut Shell<T>| -> IoResult<()>) -> IoResult<()> {
if self.config.verbose {
return callback(self)
}
@ -50,7 +54,11 @@ impl<T: Writer + Send> Shell<T> {
impl<T: Writer + Send> Terminal<T> for Shell<T> {
fn new(out: T) -> Option<Shell<T>> {
Shell::create(out, ShellConfig { color: true, verbose: false, tty: false })
Shell::create(out, ShellConfig {
color: true,
verbose: false,
tty: false,
})
}
fn fg(&mut self, color: color::Color) -> IoResult<bool> {

View File

@ -4,38 +4,28 @@ use core::{Summary,Package,PackageId};
use sources::{PathSource,GitSource};
use util::{Config,CargoResult};
/**
* A Source finds and downloads remote packages based on names and
* versions.
*/
/// A Source finds and downloads remote packages based on names and
/// versions.
pub trait Source {
/**
* The update method performs any network operations required to
* get the entire list of all names, versions and dependencies of
* packages managed by the Source.
*/
/// The update method performs any network operations required to
/// get the entire list of all names, versions and dependencies of
/// packages managed by the Source.
fn update(&mut self) -> CargoResult<()>;
/**
* The list method lists all names, versions and dependencies of
* packages managed by the source. It assumes that `update` has
* already been called and no additional network operations are
* required.
*/
/// The list method lists all names, versions and dependencies of
/// packages managed by the source. It assumes that `update` has
/// already been called and no additional network operations are
/// required.
fn list(&self) -> CargoResult<Vec<Summary>>;
/**
* The download method fetches the full package for each name and
* version specified.
*/
/// The download method fetches the full package for each name and
/// version specified.
fn download(&self, packages: &[PackageId]) -> CargoResult<()>;
/**
* The get method returns the Path of each specified package on the
* local file system. It assumes that `download` was already called,
* and that the packages are already locally available on the file
* system.
*/
/// The get method returns the Path of each specified package on the
/// local file system. It assumes that `download` was already called,
/// and that the packages are already locally available on the file
/// system.
fn get(&self, packages: &[PackageId]) -> CargoResult<Vec<Package>>;
}
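
The doc comments above describe the four methods as a pipeline: update, then list, then download, then get. Below is a minimal sketch of that call order over a generic source, using only names that appear elsewhere in this commit (Summary::get_package_id shows up in the resolver hunk); the helper function itself is hypothetical and not part of the commit.

// Hypothetical helper illustrating the documented call order.
fn fetch_everything<S: Source>(source: &mut S) -> CargoResult<Vec<Package>> {
    try!(source.update());                  // network: refresh metadata
    let summaries = try!(source.list());    // names, versions, dependencies
    let ids: Vec<PackageId> = summaries.iter().map(|summary| {
        summary.get_package_id().clone()
    }).collect();
    try!(source.download(ids.as_slice()));  // fetch the full packages
    source.get(ids.as_slice())              // materialize local Package values
}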
@ -63,7 +53,8 @@ impl SourceId {
// Pass absolute path
pub fn for_path(path: &Path) -> SourceId {
// TODO: use proper path -> URL
SourceId::new(PathKind, url::from_str(format!("file://{}", path.display()).as_slice()).unwrap())
let url = format!("file://{}", path.display());
SourceId::new(PathKind, url::from_str(url.as_slice()).unwrap())
}
pub fn for_git(url: &Url, reference: &str) -> SourceId {
@ -71,7 +62,8 @@ impl SourceId {
}
pub fn for_central() -> SourceId {
SourceId::new(RegistryKind, url::from_str(format!("https://example.com").as_slice()).unwrap())
SourceId::new(RegistryKind,
url::from_str("https://example.com").unwrap())
}
pub fn get_url<'a>(&'a self) -> &'a Url {

View File

@ -4,10 +4,7 @@ use core::{
PackageId
};
/**
* Summaries are cloned, and should not be mutated after creation
*/
/// Summaries are cloned, and should not be mutated after creation
#[deriving(Show,Clone,PartialEq)]
pub struct Summary {
package_id: PackageId,

View File

@ -164,7 +164,8 @@ impl PredBuilder {
fn set_version_part(&mut self, part: &str) -> CargoResult<()> {
if self.op.is_none() {
// If no op is specified, then the predicate is an exact match on the version
// If no op is specified, then the predicate is an exact match on
// the version
self.op = Some(Ex);
}
@ -181,10 +182,8 @@ impl PredBuilder {
Ok(())
}
/**
* Validates that a version predicate can be created given the present
* information.
*/
/// Validates that a version predicate can be created given the present
/// information.
fn build(&self) -> CargoResult<Predicate> {
let op = match self.op {
Some(x) => x,

View File

@ -17,18 +17,20 @@ extern crate log;
#[cfg(test)]
extern crate hamcrest;
use serialize::{Decoder,Encoder,Decodable,Encodable,json};
use serialize::{Decoder, Encoder, Decodable, Encodable, json};
use std::io;
use hammer::{FlagDecoder, FlagConfig, HammerError, FlagConfiguration};
pub use util::{CargoError, CliError, CliResult, human};
macro_rules! some(
($e:expr) => (
match $e {
Some(e) => e,
None => return None
}
))
($e:expr) => (
match $e {
Some(e) => e,
None => return None
}
)
)
macro_rules! cargo_try (
($expr:expr) => ({
@ -65,8 +67,20 @@ impl FlagConfig for GlobalFlags {
}
}
pub fn execute_main<'a, T: RepresentsFlags, U: RepresentsJSON, V: Encodable<json::Encoder<'a>, io::IoError>>(exec: fn(T, U) -> CliResult<Option<V>>) {
fn call<'a, T: RepresentsFlags, U: RepresentsJSON, V: Encodable<json::Encoder<'a>, io::IoError>>(exec: fn(T, U) -> CliResult<Option<V>>, args: &[String]) -> CliResult<Option<V>> {
pub fn execute_main<'a,
T: RepresentsFlags,
U: RepresentsJSON,
V: Encodable<json::Encoder<'a>, io::IoError>>(
exec: fn(T, U) -> CliResult<Option<V>>)
{
fn call<'a,
T: RepresentsFlags,
U: RepresentsJSON,
V: Encodable<json::Encoder<'a>, io::IoError>>(
exec: fn(T, U) -> CliResult<Option<V>>,
args: &[String])
-> CliResult<Option<V>>
{
let flags = try!(flags_from_args::<T>(args));
let json = try!(json_from_stdin::<U>());
@ -79,8 +93,18 @@ pub fn execute_main<'a, T: RepresentsFlags, U: RepresentsJSON, V: Encodable<json
}
}
pub fn execute_main_without_stdin<'a, T: RepresentsFlags, V: Encodable<json::Encoder<'a>, io::IoError>>(exec: fn(T) -> CliResult<Option<V>>) {
fn call<'a, T: RepresentsFlags, V: Encodable<json::Encoder<'a>, io::IoError>>(exec: fn(T) -> CliResult<Option<V>>, args: &[String]) -> CliResult<Option<V>> {
pub fn execute_main_without_stdin<'a,
T: RepresentsFlags,
V: Encodable<json::Encoder<'a>, io::IoError>>(
exec: fn(T) -> CliResult<Option<V>>)
{
fn call<'a,
T: RepresentsFlags,
V: Encodable<json::Encoder<'a>, io::IoError>>(
exec: fn(T) -> CliResult<Option<V>>,
args: &[String])
-> CliResult<Option<V>>
{
let flags = try!(flags_from_args::<T>(args));
exec(flags)
@ -92,7 +116,11 @@ pub fn execute_main_without_stdin<'a, T: RepresentsFlags, V: Encodable<json::Enc
}
}
pub fn process_executed<'a, T: Encodable<json::Encoder<'a>, io::IoError>>(result: CliResult<Option<T>>, flags: GlobalFlags) {
pub fn process_executed<'a,
T: Encodable<json::Encoder<'a>, io::IoError>>(
result: CliResult<Option<T>>,
flags: GlobalFlags)
{
match result {
Err(e) => handle_error(e, flags.verbose),
Ok(encodable) => {
@ -130,20 +158,30 @@ fn args() -> Vec<String> {
fn flags_from_args<T: RepresentsFlags>(args: &[String]) -> CliResult<T> {
let mut decoder = FlagDecoder::new::<T>(args);
Decodable::decode(&mut decoder).map_err(|e: HammerError| CliError::new(e.message, 1))
Decodable::decode(&mut decoder).map_err(|e: HammerError| {
CliError::new(e.message, 1)
})
}
fn global_flags() -> CliResult<GlobalFlags> {
let mut decoder = FlagDecoder::new::<GlobalFlags>(args().tail());
Decodable::decode(&mut decoder).map_err(|e: HammerError| CliError::new(e.message, 1))
Decodable::decode(&mut decoder).map_err(|e: HammerError| {
CliError::new(e.message, 1)
})
}
fn json_from_stdin<T: RepresentsJSON>() -> CliResult<T> {
let mut reader = io::stdin();
let input = try!(reader.read_to_str().map_err(|_| CliError::new("Standard in did not exist or was not UTF-8", 1)));
let input = try!(reader.read_to_str().map_err(|_| {
CliError::new("Standard in did not exist or was not UTF-8", 1)
}));
let json = try!(json::from_str(input.as_slice()).map_err(|_| CliError::new("Could not parse standard in as JSON", 1)));
let json = try!(json::from_str(input.as_slice()).map_err(|_| {
CliError::new("Could not parse standard in as JSON", 1)
}));
let mut decoder = json::Decoder::new(json);
Decodable::decode(&mut decoder).map_err(|_| CliError::new("Could not process standard in as input", 1))
Decodable::decode(&mut decoder).map_err(|_| {
CliError::new("Could not process standard in as input", 1)
})
}

View File

@ -1,18 +1,26 @@
/**
* Cargo compile currently does the following steps:
*
* All configurations are already injected as environment variables via the main cargo command
*
* 1. Read the manifest
* 2. Shell out to `cargo-resolve` with a list of dependencies and sources as stdin
* a. Shell out to `--do update` and `--do list` for each source
* b. Resolve dependencies and return a list of name/version/source
* 3. Shell out to `--do download` for each source
* 4. Shell out to `--do get` for each source, and build up the list of paths to pass to rustc -L
* 5. Call `cargo-rustc` with the results of the resolver zipped together with the results of the `get`
* a. Topologically sort the dependencies
* b. Compile each dependency in order, passing in the -L's pointing at each previously compiled dependency
*/
//!
//! Cargo compile currently does the following steps:
//!
//! All configurations are already injected as environment variables via the
//! main cargo command
//!
//! 1. Read the manifest
//! 2. Shell out to `cargo-resolve` with a list of dependencies and sources as
//! stdin
//!
//! a. Shell out to `--do update` and `--do list` for each source
//! b. Resolve dependencies and return a list of name/version/source
//!
//! 3. Shell out to `--do download` for each source
//! 4. Shell out to `--do get` for each source, and build up the list of paths
//! to pass to rustc -L
//! 5. Call `cargo-rustc` with the results of the resolver zipped together with
//! the results of the `get`
//!
//! a. Topologically sort the dependencies
//! b. Compile each dependency in order, passing in the -L's pointing at each
//! previously compiled dependency
//!
use std::os;
use util::config::{ConfigValue};
@ -25,7 +33,8 @@ use util::{CargoResult, Wrap, config, internal, human};
pub fn compile(manifest_path: &Path) -> CargoResult<()> {
log!(4, "compile; manifest-path={}", manifest_path.display());
let mut source = PathSource::new(&SourceId::for_path(&manifest_path.dir_path()));
let id = SourceId::for_path(&manifest_path.dir_path());
let mut source = PathSource::new(&id);
try!(source.update());
@ -37,9 +46,14 @@ pub fn compile(manifest_path: &Path) -> CargoResult<()> {
let source_ids = package.get_source_ids();
let mut registry = try!(PackageRegistry::new(source_ids, override_ids));
let resolved = try!(resolver::resolve(package.get_dependencies(), &mut registry).wrap(human("unable to resolve dependencies")));
let resolved = try!(resolver::resolve(package.get_dependencies(),
&mut registry).wrap({
human("unable to resolve dependencies")
}));
let packages = try!(registry.get(resolved.as_slice()).wrap(human("unable to get packages from source")));
let packages = try!(registry.get(resolved.as_slice()).wrap({
human("unable to get packages from source")
}));
debug!("packages={}", packages);
@ -53,11 +67,15 @@ fn source_ids_from_config() -> CargoResult<Vec<SourceId>> {
debug!("loaded config; configs={}", configs);
let config_paths = configs.find_equiv(&"paths").map(|v| v.clone()).unwrap_or_else(|| ConfigValue::new());
let config_paths = configs.find_equiv(&"paths").map(|v| v.clone());
let config_paths = config_paths.unwrap_or_else(|| ConfigValue::new());
let paths: Vec<Path> = match config_paths.get_value() {
&config::String(_) => return Err(internal("The path was configured as a String instead of a List")),
&config::List(ref list) => list.iter().map(|path| Path::new(path.as_slice())).collect()
let paths: Vec<Path> = match *config_paths.get_value() {
config::String(_) => return Err(internal("The path was configured as \
a String instead of a List")),
config::List(ref list) => {
list.iter().map(|path| Path::new(path.as_slice())).collect()
}
};
Ok(paths.iter().map(|p| SourceId::for_path(p)).collect())

View File

@ -3,20 +3,29 @@ use util;
use core::{Package,Manifest,SourceId};
use util::{CargoResult, human};
pub fn read_manifest(contents: &[u8], source_id: &SourceId) -> CargoResult<(Manifest, Vec<Path>)> {
util::toml::to_manifest(contents, source_id).map_err(|err| human(err.to_str()))
pub fn read_manifest(contents: &[u8], source_id: &SourceId)
-> CargoResult<(Manifest, Vec<Path>)>
{
util::toml::to_manifest(contents, source_id).map_err(|err| {
human(err.to_str())
})
}
pub fn read_package(path: &Path, source_id: &SourceId) -> CargoResult<(Package, Vec<Path>)> {
pub fn read_package(path: &Path, source_id: &SourceId)
-> CargoResult<(Package, Vec<Path>)>
{
log!(5, "read_package; path={}; source-id={}", path.display(), source_id);
let mut file = cargo_try!(File::open(path));
let data = cargo_try!(file.read_to_end());
let (manifest, nested) = cargo_try!(read_manifest(data.as_slice(), source_id));
let (manifest, nested) = cargo_try!(read_manifest(data.as_slice(),
source_id));
Ok((Package::new(manifest, path), nested))
}
pub fn read_packages(path: &Path, source_id: &SourceId) -> CargoResult<Vec<Package>> {
pub fn read_packages(path: &Path, source_id: &SourceId)
-> CargoResult<Vec<Package>>
{
let (pkg, nested) = try!(read_package(&path.join("Cargo.toml"), source_id));
let mut ret = vec!(pkg);

View File

@ -30,7 +30,8 @@ pub fn compile_packages(pkg: &Package, deps: &PackageSet) -> CargoResult<()> {
Ok(())
}
fn compile_pkg(pkg: &Package, dest: &Path, deps_dir: &Path, primary: bool) -> CargoResult<()> {
fn compile_pkg(pkg: &Package, dest: &Path, deps_dir: &Path,
primary: bool) -> CargoResult<()> {
debug!("compile_pkg; pkg={}; targets={}", pkg, pkg.get_targets());
match pkg.get_manifest().get_build() {
@ -50,7 +51,9 @@ fn compile_pkg(pkg: &Package, dest: &Path, deps_dir: &Path, primary: bool) -> Ca
}
fn mk_target(target: &Path) -> CargoResult<()> {
io::fs::mkdir_recursive(target, io::UserRWX).chain_error(|| internal("could not create target directory"))
io::fs::mkdir_recursive(target, io::UserRWX).chain_error(|| {
internal("could not create target directory")
})
}
fn compile_custom(pkg: &Package, cmd: &str, dest: &Path, deps_dir: &Path,
@ -67,13 +70,15 @@ fn compile_custom(pkg: &Package, cmd: &str, dest: &Path, deps_dir: &Path,
p.exec_with_output().map(|_| ()).map_err(|e| e.mark_human())
}
fn rustc(root: &Path, target: &Target, dest: &Path, deps: &Path, verbose: bool) -> CargoResult<()> {
fn rustc(root: &Path, target: &Target, dest: &Path, deps: &Path,
verbose: bool) -> CargoResult<()> {
let crate_types = target.rustc_crate_types();
for crate_type in crate_types.iter() {
log!(5, "root={}; target={}; crate_type={}; dest={}; deps={}; verbose={}",
root.display(), target, crate_type, dest.display(), deps.display(), verbose);
root.display(), target, crate_type, dest.display(), deps.display(),
verbose);
let rustc = prepare_rustc(root, target, *crate_type, dest, deps);
@ -87,7 +92,8 @@ fn rustc(root: &Path, target: &Target, dest: &Path, deps: &Path, verbose: bool)
Ok(())
}
fn prepare_rustc(root: &Path, target: &Target, crate_type: &'static str, dest: &Path, deps: &Path) -> ProcessBuilder {
fn prepare_rustc(root: &Path, target: &Target, crate_type: &'static str,
dest: &Path, deps: &Path) -> ProcessBuilder {
let mut args = Vec::new();
build_base_args(&mut args, target, crate_type, dest);
@ -99,7 +105,8 @@ fn prepare_rustc(root: &Path, target: &Target, crate_type: &'static str, dest: &
.env("RUST_LOG", None) // rustc is way too noisy
}
fn build_base_args(into: &mut Args, target: &Target, crate_type: &'static str, dest: &Path) {
fn build_base_args(into: &mut Args, target: &Target, crate_type: &'static str,
dest: &Path) {
// TODO: Handle errors in converting paths into args
into.push(target.get_path().display().to_str());
into.push("--crate-type".to_str());

View File

@ -111,7 +111,8 @@ impl Source for GitSource {
}
fn get(&self, ids: &[PackageId]) -> CargoResult<Vec<Package>> {
log!(5, "getting packages for package ids `{}` from `{}`", ids, self.remote);
log!(5, "getting packages for package ids `{}` from `{}`", ids,
self.remote);
// TODO: Support multiple manifests per repo
let pkgs = try!(self.packages());

View File

@ -63,10 +63,8 @@ macro_rules! errln(
($($arg:tt)*) => (let _ = writeln!(::std::io::stdio::stderr(), $($arg)*))
)
/**
* GitRemote represents a remote repository. It gets cloned into a local GitDatabase.
*/
/// GitRemote represents a remote repository. It gets cloned into a local
/// GitDatabase.
#[deriving(PartialEq,Clone,Show)]
pub struct GitRemote {
url: Url,
@ -74,7 +72,7 @@ pub struct GitRemote {
#[deriving(PartialEq,Clone,Encodable)]
struct EncodableGitRemote {
url: String
url: String,
}
impl<E, S: Encoder<E>> Encodable<S, E> for GitRemote {
@ -85,11 +83,8 @@ impl<E, S: Encoder<E>> Encodable<S, E> for GitRemote {
}
}
/**
* GitDatabase is a local clone of a remote repository's database. Multiple GitCheckouts
* can be cloned from this GitDatabase.
*/
/// GitDatabase is a local clone of a remote repository's database. Multiple
/// GitCheckouts can be cloned from this GitDatabase.
#[deriving(PartialEq,Clone)]
pub struct GitDatabase {
remote: GitRemote,
@ -99,7 +94,7 @@ pub struct GitDatabase {
#[deriving(Encodable)]
pub struct EncodableGitDatabase {
remote: GitRemote,
path: String
path: String,
}
impl<E, S: Encoder<E>> Encodable<S, E> for GitDatabase {
@ -111,12 +106,9 @@ impl<E, S: Encoder<E>> Encodable<S, E> for GitDatabase {
}
}
/**
* GitCheckout is a local checkout of a particular revision. Calling `clone_into` with
* a reference will resolve the reference into a revision, and return a CargoError
* if no revision for that reference was found.
*/
/// GitCheckout is a local checkout of a particular revision. Calling
/// `clone_into` with a reference will resolve the reference into a revision,
/// and return a CargoError if no revision for that reference was found.
pub struct GitCheckout {
database: GitDatabase,
location: Path,
@ -129,7 +121,7 @@ pub struct EncodableGitCheckout {
database: GitDatabase,
location: String,
reference: String,
revision: String
revision: String,
}
impl<E, S: Encoder<E>> Encodable<S, E> for GitCheckout {
@ -143,9 +135,7 @@ impl<E, S: Encoder<E>> Encodable<S, E> for GitCheckout {
}
}
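
Read together, the three doc comments above describe a chain: a GitRemote is cloned into a local GitDatabase, and a GitDatabase is checked out into a GitCheckout pinned to a single revision. A minimal sketch of that last step, using only the copy_to signature visible further down in this file; the helper is hypothetical, and how the database is obtained from the remote is not shown in these hunks.

// Hypothetical helper, not part of the commit: check out `reference`
// from an existing database into `dest`. Internally, copy_to resolves
// the reference to a revision (erroring if none exists), clones the
// database into `dest`, then fetches and updates submodules.
fn checkout_ref(db: &GitDatabase, reference: &str,
                dest: &Path) -> CargoResult<GitCheckout> {
    db.copy_to(reference, dest)
}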
/**
* Implementations
*/
// Implementations
impl GitRemote {
pub fn new(url: &Url) -> GitRemote {
@ -167,7 +157,8 @@ impl GitRemote {
}
fn fetch_into(&self, path: &Path) -> CargoResult<()> {
Ok(git!(*path, "fetch --force --quiet --tags {} refs/heads/*:refs/heads/*", self.fetch_location()))
Ok(git!(*path, "fetch --force --quiet --tags {} \
refs/heads/*:refs/heads/*", self.fetch_location()))
}
fn clone_into(&self, path: &Path) -> CargoResult<()> {
@ -175,7 +166,8 @@ impl GitRemote {
cargo_try!(mkdir_recursive(path, UserDir));
Ok(git!(dirname, "clone {} {} --bare --no-hardlinks --quiet", self.fetch_location(), path.display()))
Ok(git!(dirname, "clone {} {} --bare --no-hardlinks --quiet",
self.fetch_location(), path.display()))
}
fn fetch_location(&self) -> String {
@ -191,8 +183,10 @@ impl GitDatabase {
&self.path
}
pub fn copy_to<S: Str>(&self, reference: S, dest: &Path) -> CargoResult<GitCheckout> {
let checkout = cargo_try!(GitCheckout::clone_into(dest, self.clone(), GitReference::for_str(reference.as_slice())));
pub fn copy_to<S: Str>(&self, reference: S,
dest: &Path) -> CargoResult<GitCheckout> {
let checkout = cargo_try!(GitCheckout::clone_into(dest, self.clone(),
GitReference::for_str(reference.as_slice())));
cargo_try!(checkout.fetch());
cargo_try!(checkout.update_submodules());
@ -207,9 +201,15 @@ impl GitDatabase {
}
impl GitCheckout {
fn clone_into(into: &Path, database: GitDatabase, reference: GitReference) -> CargoResult<GitCheckout> {
fn clone_into(into: &Path, database: GitDatabase,
reference: GitReference) -> CargoResult<GitCheckout> {
let revision = cargo_try!(database.rev_for(reference.as_slice()));
let checkout = GitCheckout { location: into.clone(), database: database, reference: reference, revision: revision };
let checkout = GitCheckout {
location: into.clone(),
database: database,
reference: reference,
revision: revision,
};
// If the git checkout already exists, we don't need to clone it again
if !checkout.location.join(".git").exists() {
@ -226,22 +226,28 @@ impl GitCheckout {
fn clone_repo(&self) -> CargoResult<()> {
let dirname = Path::new(self.location.dirname());
cargo_try!(mkdir_recursive(&dirname, UserDir).chain_error(||
human(format!("Couldn't mkdir {}", Path::new(self.location.dirname()).display()))));
cargo_try!(mkdir_recursive(&dirname, UserDir).chain_error(|| {
human(format!("Couldn't mkdir {}",
Path::new(self.location.dirname()).display()))
}));
if self.location.exists() {
cargo_try!(rmdir_recursive(&self.location).chain_error(||
human(format!("Couldn't rmdir {}", Path::new(&self.location).display()))));
cargo_try!(rmdir_recursive(&self.location).chain_error(|| {
human(format!("Couldn't rmdir {}",
Path::new(&self.location).display()))
}));
}
git!(dirname, "clone --no-checkout --quiet {} {}", self.get_source().display(), self.location.display());
git!(dirname, "clone --no-checkout --quiet {} {}",
self.get_source().display(), self.location.display());
cargo_try!(chmod(&self.location, AllPermissions));
Ok(())
}
fn fetch(&self) -> CargoResult<()> {
git!(self.location, "fetch --force --quiet --tags {}", self.get_source().display());
git!(self.location, "fetch --force --quiet --tags {}",
self.get_source().display());
cargo_try!(self.reset(self.revision.as_slice()));
Ok(())
}
@ -258,15 +264,19 @@ impl GitCheckout {
fn git(path: &Path, str: &str) -> ProcessBuilder {
debug!("Executing git {} @ {}", str, path.display());
process("git").args(str.split(' ').collect::<Vec<&str>>().as_slice()).cwd(path.clone())
process("git").args(str.split(' ').collect::<Vec<&str>>().as_slice())
.cwd(path.clone())
}
fn git_inherit(path: &Path, str: String) -> CargoResult<()> {
git(path, str.as_slice()).exec().chain_error(|| human(format!("Executing `git {}` failed", str)))
git(path, str.as_slice()).exec().chain_error(|| {
human(format!("Executing `git {}` failed", str))
})
}
fn git_output(path: &Path, str: String) -> CargoResult<String> {
let output = cargo_try!(git(path, str.as_slice()).exec_with_output().chain_error(||
let output = cargo_try!(git(path, str.as_slice()).exec_with_output()
.chain_error(||
human(format!("Executing `git {}` failed", str))));
Ok(to_str(output.output.as_slice()).as_slice().trim_right().to_str())

View File

@ -10,17 +10,13 @@ pub struct PathSource {
packages: Vec<Package>
}
/**
* TODO: Figure out if packages should be discovered in new or self should be
* mut and packages are discovered in update
*/
// TODO: Figure out if packages should be discovered in new or self should be
// mut and packages are discovered in update
impl PathSource {
/**
* Invoked with an absolute path to a directory that contains a Cargo.toml.
* The source will read the manifest and find any other packages contained
* in the directory structure reachable by the root manifest.
*/
/// Invoked with an absolute path to a directory that contains a Cargo.toml.
/// The source will read the manifest and find any other packages contained
/// in the directory structure reachable by the root manifest.
pub fn new(id: &SourceId) -> PathSource {
log!(5, "new; id={}", id);
assert!(id.is_path(), "does not represent a path source; id={}", id);

View File

@ -11,8 +11,9 @@ pub struct Config {
impl Config {
pub fn new() -> CargoResult<Config> {
Ok(Config {
home_path: cargo_try!(os::homedir()
.require(|| human("Couldn't find the home directory")))
home_path: cargo_try!(os::homedir().require(|| {
human("Couldn't find the home directory")
}))
})
}
@ -89,7 +90,9 @@ impl<E, S: Encoder<E>> Encodable<S, E> for ConfigValue {
impl fmt::Show for ConfigValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let paths: Vec<String> = self.path.iter().map(|p| p.display().to_str()).collect();
let paths: Vec<String> = self.path.iter().map(|p| {
p.display().to_str()
}).collect();
write!(f, "{} (from {})", self.value, paths)
}
}
@ -109,13 +112,16 @@ pub fn all_configs(pwd: Path) -> CargoResult<HashMap<String, ConfigValue>> {
Ok(map)
}
fn find_in_tree<T>(pwd: &Path, walk: |io::fs::File| -> CargoResult<T>) -> CargoResult<T> {
fn find_in_tree<T>(pwd: &Path,
walk: |io::fs::File| -> CargoResult<T>) -> CargoResult<T> {
let mut current = pwd.clone();
loop {
let possible = current.join(".cargo").join("config");
if possible.exists() {
let file = cargo_try!(io::fs::File::open(&possible).chain_error(|| internal("could not open file")));
let file = cargo_try!(io::fs::File::open(&possible).chain_error(|| {
internal("could not open file")
}));
match walk(file) {
Ok(res) => return Ok(res),
_ => ()
@ -128,14 +134,17 @@ fn find_in_tree<T>(pwd: &Path, walk: |io::fs::File| -> CargoResult<T>) -> CargoR
Err(internal(""))
}
fn walk_tree(pwd: &Path, walk: |io::fs::File| -> CargoResult<()>) -> CargoResult<()> {
fn walk_tree(pwd: &Path,
walk: |io::fs::File| -> CargoResult<()>) -> CargoResult<()> {
let mut current = pwd.clone();
let mut err = false;
loop {
let possible = current.join(".cargo").join("config");
if possible.exists() {
let file = cargo_try!(io::fs::File::open(&possible).chain_error(|| internal("could not open file")));
let file = cargo_try!(io::fs::File::open(&possible).chain_error(|| {
internal("could not open file")
}));
match walk(file) {
Err(_) => err = false,
_ => ()
@ -155,34 +164,48 @@ fn extract_config(file: io::fs::File, key: &str) -> CargoResult<ConfigValue> {
let root = cargo_try!(toml::parse_from_buffer(&mut buf));
let val = cargo_try!(root.lookup(key).require(|| internal("")));
let v = match val {
&toml::String(ref val) => String(val.clone()),
&toml::Array(ref val) => List(val.iter().map(|s: &toml::Value| s.to_str()).collect()),
let v = match *val {
toml::String(ref val) => String(val.clone()),
toml::Array(ref val) => {
List(val.iter().map(|s: &toml::Value| s.to_str()).collect())
}
_ => return Err(internal(""))
};
Ok(ConfigValue{ value: v, path: vec!(path) })
}
fn extract_all_configs(file: io::fs::File, map: &mut HashMap<String, ConfigValue>) -> CargoResult<()> {
fn extract_all_configs(file: io::fs::File,
map: &mut HashMap<String, ConfigValue>) -> CargoResult<()> {
let path = file.path().clone();
let mut buf = io::BufferedReader::new(file);
let root = cargo_try!(toml::parse_from_buffer(&mut buf).chain_error(||
internal(format!("could not parse Toml manifest; path={}", path.display()))));
let root = cargo_try!(toml::parse_from_buffer(&mut buf).chain_error(|| {
internal(format!("could not parse Toml manifest; path={}",
path.display()))
}));
let table = cargo_try!(root.get_table().require(||
internal(format!("could not parse Toml manifest; path={}", path.display()))));
let table = cargo_try!(root.get_table().require(|| {
internal(format!("could not parse Toml manifest; path={}",
path.display()))
}));
for (key, value) in table.iter() {
match value {
&toml::String(ref val) => { map.insert(key.clone(), ConfigValue { value: String(val.clone()), path: vec!(path.clone()) }); }
&toml::String(ref val) => {
map.insert(key.clone(), ConfigValue {
value: String(val.clone()),
path: vec!(path.clone())
});
}
&toml::Array(ref val) => {
let config = map.find_or_insert_with(key.clone(), |_| {
ConfigValue { path: vec!(), value: List(vec!()) }
});
cargo_try!(merge_array(config, val.as_slice(), &path).chain_error(||
internal(format!("The `{}` key in your config", key))));
cargo_try!(merge_array(config, val.as_slice(),
&path).chain_error(|| {
internal(format!("The `{}` key in your config", key))
}));
},
_ => ()
}
@ -191,15 +214,19 @@ fn extract_all_configs(file: io::fs::File, map: &mut HashMap<String, ConfigValue
Ok(())
}
fn merge_array(existing: &mut ConfigValue, val: &[toml::Value], path: &Path) -> CargoResult<()> {
fn merge_array(existing: &mut ConfigValue, val: &[toml::Value],
path: &Path) -> CargoResult<()> {
match existing.value {
String(_) => return Err(internal("should be an Array, but it was a String")),
String(_) => Err(internal("should be an Array, but it was a String")),
List(ref mut list) => {
let new_list: Vec<CargoResult<String>> = val.iter().map(|s: &toml::Value| toml_string(s)).collect();
let new_list: Vec<CargoResult<String>> =
val.iter().map(toml_string).collect();
if new_list.iter().any(|v| v.is_err()) {
return Err(internal("should be an Array of Strings, but was an Array of other values"));
return Err(internal("should be an Array of Strings, but \
was an Array of other values"));
} else {
let new_list: Vec<String> = new_list.move_iter().map(|v| v.unwrap()).collect();
let new_list: Vec<String> =
new_list.move_iter().map(|v| v.unwrap()).collect();
list.push_all(new_list.as_slice());
existing.path.push(path.clone());
Ok(())

View File

@ -254,7 +254,10 @@ impl CliError {
}
}
pub fn process_error<S: Str>(msg: S, command: &Command, status: Option<&ProcessExit>, output: Option<&ProcessOutput>) -> ProcessError {
pub fn process_error<S: Str>(msg: S,
command: &Command,
status: Option<&ProcessExit>,
output: Option<&ProcessOutput>) -> ProcessError {
ProcessError {
msg: msg.as_slice().to_str(),
command: command.to_str(),
@ -265,7 +268,8 @@ pub fn process_error<S: Str>(msg: S, command: &Command, status: Option<&ProcessE
}
}
pub fn internal_error<S1: Str, S2: Str>(error: S1, detail: S2) -> Box<CargoError> {
pub fn internal_error<S1: Str, S2: Str>(error: S1,
detail: S2) -> Box<CargoError> {
box ConcreteCargoError {
description: error.as_slice().to_str(),
detail: Some(detail.as_slice().to_str()),

View File

@ -1,7 +1,8 @@
pub use self::config::Config;
pub use self::process_builder::{process,ProcessBuilder};
pub use self::process_builder::{process, ProcessBuilder};
pub use self::result::{Wrap, Require};
pub use self::errors::{CargoResult, CargoError, BoxError, ChainError, CliResult, CliError, ProcessError};
pub use self::errors::{CargoResult, CargoError, BoxError, ChainError, CliResult};
pub use self::errors::{CliError, ProcessError};
pub use self::errors::{process_error, internal_error, internal, human};
pub use self::paths::realpath;

View File

@ -1,8 +1,8 @@
use std::fmt;
use std::fmt::{Show,Formatter};
use std::fmt::{Show, Formatter};
use std::os;
use std::path::Path;
use std::io::process::{Command,ProcessOutput,InheritFd};
use std::io::process::{Command, ProcessOutput, InheritFd};
use util::{ProcessError, process_error};
use std::collections::HashMap;
@ -77,9 +77,12 @@ impl ProcessBuilder {
.stdout(InheritFd(1))
.stderr(InheritFd(2));
let msg = || format!("Could not execute process `{}`", self.debug_string());
let msg = || format!("Could not execute process `{}`",
self.debug_string());
let exit = try!(command.status().map_err(|_| process_error(msg(), &command, None, None)));
let exit = try!(command.status().map_err(|_| {
process_error(msg(), &command, None, None)
}));
if exit.success() {
Ok(())
@ -92,14 +95,18 @@ impl ProcessBuilder {
let mut command = self.build_command();
command.env(self.build_env().as_slice());
let msg = || format!("Could not execute process `{}`", self.debug_string());
let msg = || format!("Could not execute process `{}`",
self.debug_string());
let output = try!(command.output().map_err(|_| process_error(msg(), &command, None, None)));
let output = try!(command.output().map_err(|_| {
process_error(msg(), &command, None, None)
}));
if output.status.success() {
Ok(output)
} else {
Err(process_error(msg(), &command, Some(&output.status), Some(&output)))
Err(process_error(msg(), &command, Some(&output.status),
Some(&output)))
}
}

View File

@ -9,15 +9,23 @@ use core::manifest::{LibKind,Lib};
use core::{Summary,Manifest,Target,Dependency,PackageId};
use util::{CargoResult, Require, human};
pub fn to_manifest(contents: &[u8], source_id: &SourceId) -> CargoResult<(Manifest, Vec<Path>)> {
let root = try!(toml::parse_from_bytes(contents).map_err(|_| human("Cargo.toml is not valid Toml")));
let toml = try!(toml_to_manifest(root).map_err(|_| human("Cargo.toml is not a valid manifest")));
pub fn to_manifest(contents: &[u8],
source_id: &SourceId) -> CargoResult<(Manifest, Vec<Path>)> {
let root = try!(toml::parse_from_bytes(contents).map_err(|_| {
human("Cargo.toml is not valid Toml")
}));
let toml = try!(toml_to_manifest(root).map_err(|_| {
human("Cargo.toml is not a valid manifest")
}));
toml.to_manifest(source_id)
}
fn toml_to_manifest(root: toml::Value) -> CargoResult<TomlManifest> {
fn decode<T: Decodable<toml::Decoder,toml::Error>>(root: &toml::Value, path: &str) -> Result<T, toml::Error> {
fn decode<T: Decodable<toml::Decoder,toml::Error>>(root: &toml::Value,
path: &str)
-> Result<T, toml::Error>
{
let root = match root.lookup(path) {
Some(val) => val,
None => return Err(toml::ParseError)
@ -33,27 +41,35 @@ fn toml_to_manifest(root: toml::Value) -> CargoResult<TomlManifest> {
let deps = match deps {
Some(deps) => {
let table = try!(deps.get_table().require(|| human("dependencies must be a table"))).clone();
let table = try!(deps.get_table().require(|| {
human("dependencies must be a table")
})).clone();
let mut deps: HashMap<String, TomlDependency> = HashMap::new();
for (k, v) in table.iter() {
match v {
&toml::String(ref string) => { deps.insert(k.clone(), SimpleDep(string.clone())); },
&toml::String(ref string) => {
deps.insert(k.clone(), SimpleDep(string.clone()));
},
&toml::Table(ref table) => {
let mut details = HashMap::<String, String>::new();
for (k, v) in table.iter() {
let v = try!(v.get_str()
.require(|| human("dependency values must be string")));
let v = try!(v.get_str().require(|| {
human("dependency values must be string")
}));
details.insert(k.clone(), v.clone());
}
let version = try!(details.find_equiv(&"version")
.require(|| human("dependencies must include a version"))).clone();
.require(|| {
human("dependencies must include a version")
})).clone();
deps.insert(k.clone(), DetailedDep(DetailedTomlDependency {
deps.insert(k.clone(),
DetailedDep(DetailedTomlDependency {
version: version,
other: details
}));
@ -67,7 +83,12 @@ fn toml_to_manifest(root: toml::Value) -> CargoResult<TomlManifest> {
None => None
};
Ok(TomlManifest { project: box project, lib: lib, bin: bin, dependencies: deps })
Ok(TomlManifest {
project: box project,
lib: lib,
bin: bin,
dependencies: deps,
})
}
type TomlLibTarget = TomlTarget;
@ -112,12 +133,15 @@ impl TomlProject {
}
impl TomlManifest {
pub fn to_manifest(&self, source_id: &SourceId) -> CargoResult<(Manifest, Vec<Path>)> {
pub fn to_manifest(&self, source_id: &SourceId)
-> CargoResult<(Manifest, Vec<Path>)>
{
let mut sources = vec!();
let mut nested_paths = vec!();
// Get targets
let targets = normalize(self.lib.as_ref().map(|l| l.as_slice()), self.bin.as_ref().map(|b| b.as_slice()));
let targets = normalize(self.lib.as_ref().map(|l| l.as_slice()),
self.bin.as_ref().map(|b| b.as_slice()));
if targets.is_empty() {
debug!("manifest has no build targets; project={}", self.project);
@ -134,7 +158,8 @@ impl TomlManifest {
(string.clone(), SourceId::for_central())
},
DetailedDep(ref details) => {
let new_source_id = details.other.find_equiv(&"git").map(|git| {
let new_source_id = details.other.find_equiv(&"git");
let new_source_id = new_source_id.map(|git| {
// TODO: Don't unwrap here
let kind = GitKind("master".to_str());
let url = url::from_str(git.as_slice()).unwrap();
@ -153,7 +178,9 @@ impl TomlManifest {
}
};
deps.push(try!(Dependency::parse(n.as_slice(), version.as_slice(), &source_id)))
deps.push(try!(Dependency::parse(n.as_slice(),
version.as_slice(),
&source_id)))
}
}
None => ()
@ -177,17 +204,22 @@ struct TomlTarget {
path: Option<String>
}
fn normalize(lib: Option<&[TomlLibTarget]>, bin: Option<&[TomlBinTarget]>) -> Vec<Target> {
fn normalize(lib: Option<&[TomlLibTarget]>,
bin: Option<&[TomlBinTarget]>) -> Vec<Target> {
log!(4, "normalizing toml targets; lib={}; bin={}", lib, bin);
fn lib_targets(dst: &mut Vec<Target>, libs: &[TomlLibTarget]) {
let l = &libs[0];
let path = l.path.clone().unwrap_or_else(|| format!("src/{}.rs", l.name));
let crate_types = l.crate_type.clone().and_then(|kinds| LibKind::from_strs(kinds).ok()).unwrap_or_else(|| vec!(Lib));
dst.push(Target::lib_target(l.name.as_slice(), crate_types, &Path::new(path)));
let crate_types = l.crate_type.clone().and_then(|kinds| {
LibKind::from_strs(kinds).ok()
}).unwrap_or_else(|| vec!(Lib));
dst.push(Target::lib_target(l.name.as_slice(), crate_types,
&Path::new(path)));
}
fn bin_targets(dst: &mut Vec<Target>, bins: &[TomlBinTarget], default: |&TomlBinTarget| -> String) {
fn bin_targets(dst: &mut Vec<Target>, bins: &[TomlBinTarget],
default: |&TomlBinTarget| -> String) {
for bin in bins.iter() {
let path = bin.path.clone().unwrap_or_else(|| default(bin));
dst.push(Target::bin_target(bin.name.as_slice(), &Path::new(path)));
@ -199,13 +231,15 @@ fn normalize(lib: Option<&[TomlLibTarget]>, bin: Option<&[TomlBinTarget]>) -> Ve
match (lib, bin) {
(Some(ref libs), Some(ref bins)) => {
lib_targets(&mut ret, libs.as_slice());
bin_targets(&mut ret, bins.as_slice(), |bin| format!("src/bin/{}.rs", bin.name));
bin_targets(&mut ret, bins.as_slice(),
|bin| format!("src/bin/{}.rs", bin.name));
},
(Some(ref libs), None) => {
lib_targets(&mut ret, libs.as_slice());
},
(None, Some(ref bins)) => {
bin_targets(&mut ret, bins.as_slice(), |bin| format!("src/{}.rs", bin.name));
bin_targets(&mut ret, bins.as_slice(),
|bin| format!("src/{}.rs", bin.name));
},
(None, None) => ()
}

View File

@ -37,10 +37,12 @@ impl FileBuilder {
let mut file = try!(
fs::File::create(&self.path)
.with_err_msg(format!("Could not create file; path={}", self.path.display())));
.with_err_msg(format!("Could not create file; path={}",
self.path.display())));
file.write_str(self.body.as_slice())
.with_err_msg(format!("Could not write to file; path={}", self.path.display()))
.with_err_msg(format!("Could not write to file; path={}",
self.path.display()))
}
fn dirname(&self) -> Path {
@ -80,7 +82,8 @@ impl ProjectBuilder {
.extra_path(cargo_dir())
}
pub fn file<B: BytesContainer, S: Str>(mut self, path: B, body: S) -> ProjectBuilder {
pub fn file<B: BytesContainer, S: Str>(mut self, path: B,
body: S) -> ProjectBuilder {
self.files.push(FileBuilder::new(self.root.join(path), body.as_slice()));
self
}
@ -126,12 +129,14 @@ pub fn project(name: &str) -> ProjectBuilder {
pub fn mkdir_recursive(path: &Path) -> Result<(), String> {
fs::mkdir_recursive(path, io::UserDir)
.with_err_msg(format!("could not create directory; path={}", path.display()))
.with_err_msg(format!("could not create directory; path={}",
path.display()))
}
pub fn rmdir_recursive(path: &Path) -> Result<(), String> {
fs::rmdir_recursive(path)
.with_err_msg(format!("could not rm directory; path={}", path.display()))
.with_err_msg(format!("could not rm directory; path={}",
path.display()))
}
pub fn main_file<T: Str>(println: T, deps: &[&str]) -> String {
@ -188,80 +193,92 @@ struct Execs {
impl Execs {
pub fn with_stdout<S: ToStr>(mut ~self, expected: S) -> Box<Execs> {
self.expect_stdout = Some(expected.to_str());
self
}
pub fn with_stderr<S: ToStr>(mut ~self, expected: S) -> Box<Execs> {
self.expect_stderr = Some(expected.to_str());
self
}
pub fn with_status(mut ~self, expected: int) -> Box<Execs> {
self.expect_exit_code = Some(expected);
self
}
fn match_output(&self, actual: &ProcessOutput) -> ham::MatchResult {
self.match_status(actual)
.and(self.match_stdout(actual))
.and(self.match_stderr(actual))
}
fn match_status(&self, actual: &ProcessOutput) -> ham::MatchResult {
match self.expect_exit_code {
None => ham::success(),
Some(code) => {
ham::expect(
actual.status.matches_exit_status(code),
format!("exited with {}\n--- stdout\n{}\n--- stderr\n{}",
actual.status,
str::from_utf8(actual.output.as_slice()),
str::from_utf8(actual.error.as_slice())))
}
pub fn with_stdout<S: ToStr>(mut ~self, expected: S) -> Box<Execs> {
self.expect_stdout = Some(expected.to_str());
self
}
}
fn match_stdout(&self, actual: &ProcessOutput) -> ham::MatchResult {
self.match_std(self.expect_stdout.as_ref(), actual.output.as_slice(), "stdout", actual.error.as_slice())
}
pub fn with_stderr<S: ToStr>(mut ~self, expected: S) -> Box<Execs> {
self.expect_stderr = Some(expected.to_str());
self
}
fn match_stderr(&self, actual: &ProcessOutput) -> ham::MatchResult {
self.match_std(self.expect_stderr.as_ref(), actual.error.as_slice(), "stderr", actual.output.as_slice())
}
pub fn with_status(mut ~self, expected: int) -> Box<Execs> {
self.expect_exit_code = Some(expected);
self
}
fn match_std(&self, expected: Option<&String>, actual: &[u8], description: &str, extra: &[u8]) -> ham::MatchResult {
match expected.as_ref().map(|s| s.as_slice()) {
None => ham::success(),
Some(out) => {
match str::from_utf8(actual) {
None => Err(format!("{} was not utf8 encoded", description)),
Some(actual) => {
ham::expect(actual == out, format!("{} was:\n`{}`\n\nexpected:\n`{}`\n\nother output:\n`{}`", description, actual, out, str::from_utf8_lossy(extra)))
}
fn match_output(&self, actual: &ProcessOutput) -> ham::MatchResult {
self.match_status(actual)
.and(self.match_stdout(actual))
.and(self.match_stderr(actual))
}
fn match_status(&self, actual: &ProcessOutput) -> ham::MatchResult {
match self.expect_exit_code {
None => ham::success(),
Some(code) => {
ham::expect(
actual.status.matches_exit_status(code),
format!("exited with {}\n--- stdout\n{}\n--- stderr\n{}",
actual.status,
str::from_utf8(actual.output.as_slice()),
str::from_utf8(actual.error.as_slice())))
}
}
}
fn match_stdout(&self, actual: &ProcessOutput) -> ham::MatchResult {
self.match_std(self.expect_stdout.as_ref(), actual.output.as_slice(),
"stdout", actual.error.as_slice())
}
fn match_stderr(&self, actual: &ProcessOutput) -> ham::MatchResult {
self.match_std(self.expect_stderr.as_ref(), actual.error.as_slice(),
"stderr", actual.output.as_slice())
}
fn match_std(&self, expected: Option<&String>, actual: &[u8],
description: &str, extra: &[u8]) -> ham::MatchResult {
match expected.as_ref().map(|s| s.as_slice()) {
None => ham::success(),
Some(out) => {
match str::from_utf8(actual) {
None => Err(format!("{} was not utf8 encoded", description)),
Some(actual) => {
ham::expect(actual == out,
format!("{} was:\n\
`{}`\n\n\
expected:\n\
`{}`\n\n\
other output:\n\
`{}`", description, actual, out,
str::from_utf8_lossy(extra)))
}
}
}
}
}
}
}
}
impl ham::SelfDescribing for Execs {
fn describe(&self) -> String {
"execs".to_str()
}
fn describe(&self) -> String {
"execs".to_str()
}
}
impl ham::Matcher<ProcessBuilder> for Execs {
fn matches(&self, process: ProcessBuilder) -> ham::MatchResult {
let res = process.exec_with_output();
fn matches(&self, process: ProcessBuilder) -> ham::MatchResult {
let res = process.exec_with_output();
match res {
Ok(out) => self.match_output(&out),
Err(ProcessError { output: Some(ref out), .. }) => self.match_output(out),
Err(e) => Err(format!("could not exec process {}: {}", process, e))
match res {
Ok(out) => self.match_output(&out),
Err(ProcessError { output: Some(ref out), .. }) => {
self.match_output(out)
}
Err(e) => Err(format!("could not exec process {}: {}", process, e))
}
}
}
}
pub fn execs() -> Box<Execs> {
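For orientation, the chained form these `with_*` builders enable in the test
suites below looks roughly like the following; a minimal usage sketch assuming
the harness's `assert_that`, `execs()`, and `cargo_process` helpers, with an
illustrative status and output:

    assert_that(p.cargo_process("cargo-compile"),
                execs()
                    .with_status(0)
                    .with_stdout("Compiling foo v0.5.0\n")
                    .with_stderr(""));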
@ -285,10 +302,13 @@ impl ham::SelfDescribing for ShellWrites {
}
impl<'a> ham::Matcher<&'a mut shell::Shell<std::io::MemWriter>> for ShellWrites {
fn matches(&self, actual: &mut shell::Shell<std::io::MemWriter>) -> ham::MatchResult {
fn matches(&self, actual: &mut shell::Shell<std::io::MemWriter>)
-> ham::MatchResult
{
use term::Terminal;
let actual = std::str::from_utf8_lossy(actual.get_ref().get_ref()).to_str();
let actual = std::str::from_utf8_lossy(actual.get_ref().get_ref());
let actual = actual.to_str();
ham::expect(actual == self.expected, actual)
}
}
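For reference, the shell tests later in this commit exercise this matcher
through the `shell_writes` constructor; a minimal usage sketch, with `shell`
assumed to be the `Shell<MemWriter>` produced by `Shell::create`:

    shell.say("Hey Alex", color::RED).assert();
    assert_that(shell, shell_writes("Hey Alex\n"));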
View File
@ -45,9 +45,7 @@ impl PathExt for Path {
}
}
/**
* Ensure required test directories exist and are empty
*/
/// Ensure required test directories exist and are empty
pub fn setup() {
let my_id = unsafe { NEXT_ID.fetch_add(1, atomics::SeqCst) };
task_id.replace(Some(my_id));
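This is the doc-comment convention called out in the commit message: `///`
documents the item that follows, while `//!` documents the enclosing module or
crate. A minimal sketch (the module-level text here is hypothetical):

    //! Shared support code for Cargo's integration tests.

    /// Ensure required test directories exist and are empty
    pub fn setup() {
        // ...
    }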
View File
@ -51,7 +51,8 @@ test!(cargo_compile_without_manifest {
assert_that(p.cargo_process("cargo-compile"),
execs()
.with_status(102)
.with_stderr("Could not find Cargo.toml in this directory or any parent directory"));
.with_stderr("Could not find Cargo.toml in this directory or any \
parent directory"));
})
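The trailing backslash escapes the newline and the indentation that follows
it, so the wrapped source still yields a single-line message. A minimal sketch
of the behaviour:

    let msg = "Could not find Cargo.toml in this \
               directory or any parent directory";
    // The escaped newline and leading spaces are not part of the value.
    assert!(!msg.contains("\n"));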
test!(cargo_compile_with_invalid_code {
@ -82,7 +83,12 @@ test!(cargo_compile_with_warnings_in_the_root_package {
assert_that(p.cargo_process("cargo-compile"),
execs()
.with_stderr("src/foo.rs:1:14: 1:26 warning: code is never used: `dead`, #[warn(dead_code)] on by default\nsrc/foo.rs:1 fn main() {} fn dead() {}\n ^~~~~~~~~~~~\n"));
.with_stderr("\
src/foo.rs:1:14: 1:26 warning: code is never used: `dead`, #[warn(dead_code)] \
on by default
src/foo.rs:1 fn main() {} fn dead() {}
^~~~~~~~~~~~
"));
})
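Here, by contrast, the embedded newlines are part of the expected compiler
output, so only the break right after the opening quote is escaped; every
unescaped line break stays in the value. A minimal sketch with illustrative
contents:

    let kept = "\
one
two
";
    assert_eq!(kept, "one\ntwo\n");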
test!(cargo_compile_with_warnings_in_a_dep_package {
@ -108,7 +114,8 @@ test!(cargo_compile_with_warnings_in_a_dep_package {
name = "foo"
"#)
.file("src/foo.rs", main_file(r#""{}", bar::gimme()"#, ["bar"]).as_slice())
.file("src/foo.rs",
main_file(r#""{}", bar::gimme()"#, ["bar"]).as_slice())
.file("bar/Cargo.toml", r#"
[project]
@ -133,7 +140,8 @@ test!(cargo_compile_with_warnings_in_a_dep_package {
assert_that(p.cargo_process("cargo-compile"),
execs()
.with_stdout(format!("Compiling bar v0.5.0 (file:{})\nCompiling foo v0.5.0 (file:{})\n",
.with_stdout(format!("Compiling bar v0.5.0 (file:{})\n\
Compiling foo v0.5.0 (file:{})\n",
bar.display(), main.display()))
.with_stderr(""));
@ -168,7 +176,8 @@ test!(cargo_compile_with_nested_deps_shorthand {
name = "foo"
"#)
.file("src/foo.rs", main_file(r#""{}", bar::gimme()"#, ["bar"]).as_slice())
.file("src/foo.rs",
main_file(r#""{}", bar::gimme()"#, ["bar"]).as_slice())
.file("bar/Cargo.toml", r#"
[project]
@ -243,7 +252,8 @@ test!(cargo_compile_with_nested_deps_longhand {
name = "foo"
"#)
.file("src/foo.rs", main_file(r#""{}", bar::gimme()"#, ["bar"]).as_slice())
.file("src/foo.rs",
main_file(r#""{}", bar::gimme()"#, ["bar"]).as_slice())
.file("bar/Cargo.toml", r#"
[project]
View File
@ -8,7 +8,9 @@ use cargo::util::{ProcessError, process};
fn setup() {
}
fn git_repo(name: &str, callback: |ProjectBuilder| -> ProjectBuilder) -> Result<ProjectBuilder, ProcessError> {
fn git_repo(name: &str, callback: |ProjectBuilder| -> ProjectBuilder)
-> Result<ProjectBuilder, ProcessError>
{
let gitconfig = paths::home().join(".gitconfig");
if !gitconfig.exists() {
@ -30,7 +32,8 @@ fn git_repo(name: &str, callback: |ProjectBuilder| -> ProjectBuilder) -> Result<
log!(5, "git add .");
try!(git_project.process("git").args(["add", "."]).exec_with_output());
log!(5, "git commit");
try!(git_project.process("git").args(["commit", "-m", "Initial commit"]).exec_with_output());
try!(git_project.process("git").args(["commit", "-m", "Initial commit"])
.exec_with_output());
Ok(git_project)
}
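A hypothetical usage sketch of the helper above: the closure receives the
fresh `ProjectBuilder`, adds files, and returns it, and `git_repo` commits the
result. The manifest contents and the `hello` body are illustrative, and the
surrounding function is assumed to return `Result<ProjectBuilder,
ProcessError>` so that `try!` applies:

    let git_project = try!(git_repo("dep1", |project| {
        project
            .file("Cargo.toml", r#"
                [project]

                name = "dep1"
                version = "0.5.0"
            "#)
            .file("src/dep1.rs",
                  "pub fn hello() -> &'static str { \"hello\" }")
    }));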
@ -80,8 +83,11 @@ test!(cargo_compile_simple_git_dep {
assert_that(project.cargo_process("cargo-compile"),
execs()
.with_stdout(format!("Updating git repository `file:{}`\nCompiling dep1 v0.5.0 (file:{})\nCompiling foo v0.5.0 (file:{})\n",
git_root.display(), git_root.display(), root.display()))
.with_stdout(format!("Updating git repository `file:{}`\n\
Compiling dep1 v0.5.0 (file:{})\n\
Compiling foo v0.5.0 (file:{})\n",
git_root.display(), git_root.display(),
root.display()))
.with_stderr(""));
assert_that(&project.root().join("target/foo"), existing_file());
@ -152,7 +158,8 @@ test!(cargo_compile_with_nested_paths {
name = "parent"
"#, git_project.root().display()))
.file("src/parent.rs", main_file(r#""{}", dep1::hello()"#, ["dep1"]).as_slice());
.file("src/parent.rs",
main_file(r#""{}", dep1::hello()"#, ["dep1"]).as_slice());
p.cargo_process("cargo-compile")
.exec_with_output()
View File
@ -24,7 +24,8 @@ test!(cargo_compile_with_nested_deps_shorthand {
name = "foo"
"#)
.file("src/foo.rs", main_file(r#""{}", bar::gimme()"#, ["bar"]).as_slice())
.file("src/foo.rs",
main_file(r#""{}", bar::gimme()"#, ["bar"]).as_slice())
.file("bar/Cargo.toml", r#"
[project]
View File
@ -9,28 +9,33 @@ fn setup() {
}
test!(non_tty {
Shell::create(MemWriter::new(), ShellConfig { color: true, verbose: true, tty: false }).assert().tap(|shell| {
let config = ShellConfig { color: true, verbose: true, tty: false };
Shell::create(MemWriter::new(), config).assert().tap(|shell| {
shell.say("Hey Alex", color::RED).assert();
assert_that(shell, shell_writes("Hey Alex\n"));
});
})
test!(color_explicitly_disabled {
Shell::create(MemWriter::new(), ShellConfig { color: false, verbose: true, tty: true }).assert().tap(|shell| {
let config = ShellConfig { color: false, verbose: true, tty: true };
Shell::create(MemWriter::new(), config).assert().tap(|shell| {
shell.say("Hey Alex", color::RED).assert();
assert_that(shell, shell_writes("Hey Alex\n"));
});
})
test!(colored_shell {
Shell::create(MemWriter::new(), ShellConfig { color: true, verbose: true, tty: true }).assert().tap(|shell| {
let config = ShellConfig { color: true, verbose: true, tty: true };
Shell::create(MemWriter::new(), config).assert().tap(|shell| {
shell.say("Hey Alex", color::RED).assert();
assert_that(shell, shell_writes(colored_output("Hey Alex\n", color::RED).assert()));
assert_that(shell, shell_writes(colored_output("Hey Alex\n",
color::RED).assert()));
});
})
fn colored_output<S: Str>(string: S, color: color::Color) -> IoResult<String> {
let mut term: TerminfoTerminal<MemWriter> = Terminal::new(MemWriter::new()).assert();
let mut term: TerminfoTerminal<MemWriter> =
Terminal::new(MemWriter::new()).assert();
try!(term.reset());
try!(term.fg(color));
try!(term.write_str(string.as_slice()));