Compare commits

...

3 Commits

Author SHA1 Message Date
R Tyler Croy 64fef7a55e Add some test cases as integration tests 2020-11-03 16:12:53 -08:00
R Tyler Croy 0d1f30665b Update the grammar to parse out stage-level properties.
Right now I'm expecting this to work for a `name` key but there may be others
that come to mind shortly
2020-11-03 15:42:59 -08:00
R Tyler Croy a39c6f3ffd cargo fmt 2020-11-03 15:22:47 -08:00
14 changed files with 196 additions and 40 deletions

Cargo.lock generated (1 addition)
View File

@ -1497,6 +1497,7 @@ dependencies = [
"otto-models",
"pest",
"pest_derive",
"pretty_env_logger 0.4.0",
"serde_yaml",
]

View File

@ -40,7 +40,6 @@ pub enum LogStream {
Stderr,
}
#[derive(Clone, Debug)]
struct LoadedManifest {
manifest: osp::Manifest,

View File

@ -26,6 +26,12 @@ impl Default for Pipeline {
/**
* Possible statuses that a Pipeline can have
*
* Each of the statuses is mapped to an i32 value such that shell exit codes can easily be used to
* set the pipeline status.
*
* For example, if a step's invocation returns an exit code of 3, then the agent should automatically
* know to set the pipeline status to Unstable
*/
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum Status {
@ -43,15 +49,15 @@ pub enum Status {
pub struct Context {
#[serde(default = "generate_uuid")]
pub uuid: Uuid,
pub name: String,
pub properties: HashMap<String, String>,
pub environment: Option<HashMap<String, String>>,
}
impl Context {
pub fn new(name: String) -> Self {
impl Default for Context {
fn default() -> Self {
Self {
uuid: generate_uuid(),
name,
properties: HashMap::default(),
environment: None,
}
}
@ -74,7 +80,9 @@ impl Step {
pub fn new(context: Uuid, symbol: String, parameters: StepParameters) -> Self {
Self {
uuid: generate_uuid(),
context, symbol, parameters,
context,
symbol,
parameters,
}
}
}
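The doc comment above ties each Status variant to an i32 value so that a step's shell exit code can set the pipeline status directly. A minimal sketch of that convention follows; apart from Unstable = 3 (the example given in the comment), the variant names and discriminants shown here are assumptions for illustration, not part of this change.

// Illustrative sketch only: Unstable = 3 follows the doc comment above,
// the remaining variants and their values are assumed.
pub enum Status {
    Successful = 0,
    Failed = 1,
    Unstable = 3,
}

fn status_from_exit_code(code: i32) -> Status {
    match code {
        0 => Status::Successful,
        3 => Status::Unstable,
        _ => Status::Failed,
    }
}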

View File

@ -9,4 +9,5 @@ log = "~0.4.11"
otto-models = { path = "../models" }
pest = "~2.1.3"
pest_derive = "~2.1.0"
pretty_env_logger = "~0.4.0"
serde_yaml = "~0.8.13"

View File

@ -4,8 +4,8 @@ extern crate pest_derive;
use log::*;
use otto_models::*;
use pest::Parser;
use pest::iterators::Pairs;
use pest::Parser;
#[derive(Parser)]
#[grammar = "pipeline.pest"]
@ -18,9 +18,8 @@ fn parse_str(parser: &mut pest::iterators::Pair<Rule>) -> String {
match parsed.as_rule() {
Rule::STRV => {
return parsed.as_str().to_string();
},
_ => {
},
}
_ => {}
}
}
"".to_string()
@ -29,13 +28,32 @@ fn parse_str(parser: &mut pest::iterators::Pair<Rule>) -> String {
fn parse_stage(parser: &mut Pairs<Rule>) -> (Context, Vec<Step>) {
use pest::iterators::Pair;
let stage = Context::new("Fake".to_string());
let mut stage = Context::default();
let mut steps: Vec<Step> = vec![];
debug!("stage: {:?}", parser);
while let Some(parsed) = parser.next() {
match parsed.as_rule() {
Rule::property => {
let mut inner = parsed.into_inner();
while let Some(parsed) = inner.next() {
match parsed.as_rule() {
Rule::IDENT => {
let key = parsed.as_str().to_string();
// This pair should be a STR
if let Some(pair) = inner.next() {
let value = pair.into_inner().as_str().to_string();
debug!("Adding to context key: {}, value: {}", key, value);
stage.properties.insert(key, value);
}
}
_ => {}
}
}
}
Rule::steps => {
let mut inner = parsed.into_inner();
@ -55,15 +73,14 @@ fn parse_stage(parser: &mut Pairs<Rule>) -> (Context, Vec<Step>) {
steps.push(step);
}
}
},
_ => {
},
}
_ => {}
}
}
(stage, steps)
}
fn parse_pipeline_string(buffer: &str) -> Result<Pipeline, pest::error::Error<Rule>> {
pub fn parse_pipeline_string(buffer: &str) -> Result<Pipeline, pest::error::Error<Rule>> {
let mut parser = PipelineParser::parse(Rule::pipeline, buffer)?;
let mut pipeline = Pipeline::default();
@ -78,13 +95,11 @@ fn parse_pipeline_string(buffer: &str) -> Result<Pipeline, pest::error::Error<Ru
pipeline.contexts.push(ctx);
pipeline.steps.append(&mut steps);
}
_ => {
},
_ => {}
}
}
},
_ => {
},
}
_ => {}
}
}
@ -97,47 +112,64 @@ mod tests {
#[test]
fn parse_steps() {
let steps = PipelineParser::parse(Rule::steps,
let steps = PipelineParser::parse(
Rule::steps,
r#"steps {
sh 'ls'
}
"#)
.unwrap().next().unwrap();
"#,
)
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_steps_positional_args() {
let steps = PipelineParser::parse(Rule::steps,
let steps = PipelineParser::parse(
Rule::steps,
r#"steps {
sh 'ls', 'utf-8', 'lolwut'
}
"#)
.unwrap().next().unwrap();
"#,
)
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_steps_keyword_arg() {
let steps = PipelineParser::parse(Rule::steps,
let steps = PipelineParser::parse(
Rule::steps,
r#"steps {
sh script: 'ls'
}
"#)
.unwrap().next().unwrap();
"#,
)
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_steps_keyword_args() {
let steps = PipelineParser::parse(Rule::steps,
let steps = PipelineParser::parse(
Rule::steps,
r#"steps {
sh script: 'ls', label: 'lolwut'
}
"#)
.unwrap().next().unwrap();
"#,
)
.unwrap()
.next()
.unwrap();
}
#[test]
fn it_works() {
let pipeline = PipelineParser::parse(Rule::pipeline,
let pipeline = PipelineParser::parse(
Rule::pipeline,
r#"
pipeline {
stages {
@ -157,7 +189,11 @@ mod tests {
}
}
}
"#).unwrap().next().unwrap();
"#,
)
.unwrap()
.next()
.unwrap();
}
#[test]
@ -177,6 +213,7 @@ mod tests {
let pipeline = parse_pipeline_string(&buf).expect("Failed to parse");
assert!(!pipeline.uuid.is_nil());
assert_eq!(pipeline.contexts.len(), 1);
assert!(pipeline.contexts[0].properties.contains_key("name"));
assert_eq!(pipeline.steps.len(), 1);
}
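With parse_pipeline_string now public, the stage-level name property added by this change is reachable from outside the crate through each stage's Context. A minimal usage sketch, assuming only what this diff shows (otto_parser::parse_pipeline_string, Pipeline.contexts, and Context.properties); the pipeline text mirrors test_data/valid.

// Sketch only: error handling is reduced to expect() for brevity.
use otto_parser::*;

fn main() {
    let buf = r#"
pipeline {
    stages {
        stage {
            name = 'Build'
            steps {
                sh 'ls'
            }
        }
    }
}
"#;
    let pipeline = parse_pipeline_string(buf).expect("Failed to parse");
    // The `name = 'Build'` property lands in the stage's Context properties map
    if let Some(name) = pipeline.contexts[0].properties.get("name") {
        println!("stage name: {}", name);
    }
}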

View File

@ -5,7 +5,7 @@ pipeline = _{ SOI ~ "pipeline" ~ BLOCK_BEGIN ~ stages+ ~ BLOCK_END ~ EOI }
stages = { "stages" ~ BLOCK_BEGIN ~ stage+ ~ BLOCK_END }
stage = { "stage" ~
BLOCK_BEGIN ~
("name" ~ "=" ~ STR) ~
(property*) ~
steps ~
BLOCK_END }
@ -16,9 +16,12 @@ step = { IDENT ~ (
| kwargs
)
}
args = _{ (STR ~ COMMA?)+ }
kwargs = _{ (kwarg ~ COMMA?)+ }
kwarg = _{ IDENT~ ":" ~ STR }
property = { IDENT ~ "=" ~ STR }
IDENT = @{ ASCII_ALPHA ~ (ASCII_ALPHANUMERIC | "_")* }
BLOCK_BEGIN = @{ "{" }

View File

@ -0,0 +1,2 @@

View File

@ -0,0 +1 @@
pipeline { }

View File

@ -0,0 +1,9 @@
pipeline {
stages {
stage {
name = 'Build'
steps {
}
}
}
}

View File

@ -0,0 +1,24 @@
pipeline {
stages {
stage {
name = 'Build'
steps {
sh 'ls'
}
}
stage {
name
=
'Deploy'
steps
{
sh 'ls -lah && touch deploy.lock'
sh 'make deploy'
}
}
}
}

View File

@ -0,0 +1,10 @@
pipeline {
stages {
stage {
name = 'Build'
steps {
sh 'ls'
}
}
}
}

View File

@ -0,0 +1,10 @@
pipeline {
stages {
stage {
name = 'Build'
steps {
sh script: 'ls'
}
}
}
}

parser/tests/valid.rs Normal file (48 additions)
View File

@ -0,0 +1,48 @@
/*
* This test module will test everything in test_data/valid
*/
use otto_parser::*;
use std::fs::ReadDir;
use std::path::PathBuf;
fn parse_file(path: &PathBuf) -> Result<otto_models::Pipeline, pest::error::Error<Rule>> {
use std::fs::File;
use std::io::Read;
let mut file = File::open(path).expect(&format!("Failed to open {:?}", path));
let mut contents = String::new();
file.read_to_string(&mut contents)
.expect("Failed to read file into string");
parse_pipeline_string(&contents)
}
fn test_in_dir(dir: &mut ReadDir, can_parse: bool) {
for entry in dir {
if let Ok(entry) = entry {
let path = entry.path();
match path.as_path().extension() {
Some(ext) => {
if ext == "otto" {
let result = parse_file(&path);
assert_eq!(can_parse, result.is_ok());
}
}
_ => {}
}
}
}
}
#[test]
fn test_valid_pipelines() {
let mut dir = std::fs::read_dir("./test_data/valid").expect("Failed to read directory");
test_in_dir(&mut dir, true);
}
#[test]
fn test_invalid_pipelines() {
let mut dir = std::fs::read_dir("./test_data/invalid").expect("Failed to read directory");
test_in_dir(&mut dir, false);
}

View File

@ -21,7 +21,9 @@ async fn main() -> std::io::Result<()> {
let artifact_path = format!("{}/{}", endpoint.url, invoke.parameters.name);
let response = surf::get(artifact_path).await.expect("Failed to query object-store");
let response = surf::get(artifact_path)
.await
.expect("Failed to query object-store");
if response.status() == 200 {
let file = OpenOptions::new()
@ -30,11 +32,12 @@ async fn main() -> std::io::Result<()> {
.open(&invoke.parameters.name)
.await?;
io::copy(response, file).await?;
}
else {
return Err(Error::new(ErrorKind::NotFound, "Could not locate the artifact"));
} else {
return Err(Error::new(
ErrorKind::NotFound,
"Could not locate the artifact",
));
}
Ok(())
}