Merge pull request #7 from rtyler/complex-deploy

Add a modified (redacted) complex deployment pipeline for some ECS mess
This commit is contained in:
R Tyler Croy 2020-12-13 15:57:35 -08:00 committed by GitHub
commit 266ab91832
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 262 additions and 62 deletions

View File

@@ -7,11 +7,11 @@ edition = "2018"
[lib]
name = "jdp"
source = "src/lib.rs"
path = "src/lib.rs"
[[bin]]
name = "jdp"
source = "src/main.rs"
path = "src/main.rs"
[dependencies]
gumdrop = "~0.8.0"

View File

@@ -0,0 +1,104 @@
/*
* This deployment pipeline does not handle the building and pushing of the
* container to ECR, but rather only handles the orchestration of pushing a
* named container from one ECS environment to another
*/
pipeline {
// Execute on a Kubernetes pod defined by deploy/containers.yml; all steps
// run in its 'aws' container unless a stage overrides the container.
agent {
kubernetes {
defaultContainer 'aws'
yamlFile 'deploy/containers.yml'
}
}
options {
// Colorize the console log output.
ansiColor('xterm')
// Abort the whole build after a timeout of 210 (the Jenkins timeout()
// step treats a bare number as minutes — NOTE(review): confirm a ~3.5h
// ceiling is what was intended here).
timeout(210)
// Retain only the most recent 128 builds.
buildDiscarder logRotator(numToKeepStr: '128')
}
// Build parameters: which image tag to deploy and which task-definition
// family to update.
parameters {
string description: 'A deployed version of the container, available in the prod ECR', name: 'IMAGE', trim: true
string description: 'The name of the task definition to update', name: 'FAMILY', trim: true, defaultValue: 'REDACTED'
}
// Defaults picked up by every `aws` CLI invocation below.
environment {
AWS_DEFAULT_REGION = 'us-east-2'
AWS_DEFAULT_OUTPUT = 'json'
}
stages {
/*
* This stage will prepare the task-definition for deployment, and will
* generate the files for each stage
*/
stage('Prepare') {
steps {
// Remove any stale generated task definitions from prior runs.
sh 'rm -f task-definition.*.json'
// Render deploy/task-definition.yml into JSON, substituting the
// @@IMAGE@@ / @@FAMILY@@ placeholders with the build parameters.
writeJSON(file: 'task-definition.dev.json',
json: readYaml(text: readFile('deploy/task-definition.yml')
.replaceAll('@@IMAGE@@', params.IMAGE)
.replaceAll('@@FAMILY@@', params.FAMILY)))
// Echo the rendered definition into the log for debugging/audit.
sh 'echo DEV task definition:'
sh 'cat task-definition.dev.json'
}
}
/*
* The dev environment should always be updated without approval when any changes are
* pushed.
*/
stage('Development') {
environment {
// Credentials binding for this stage only. NOTE(review): only the
// access key id is bound here — confirm the matching secret key is
// made available to the CLI by some mechanism not visible in this file.
AWS_ACCESS_KEY_ID = credentials('REDACTED')
}
steps {
// Register the rendered task definition; capture the CLI response so
// the new revision number can be extracted in the script block below.
sh 'aws ecs register-task-definition --cli-input-json file://`pwd`/task-definition.dev.json > task-output.dev.json'
/*
* Technically this could all be done in shell scripts if we
* had a container with `jq` installed, see the gist below for
* reference.
*
* At this point it's just easier to use the `script` escape
* hatch in Jenkins Pipeline to invoke some Scripted Pipeline
* steps
*
* https://gist.github.com/tstrohmeier/3da60392a5ea2eecbe32895e6624a2b4
*/
script {
def taskOutput = readJSON file: 'task-output.dev.json'
def revision = taskOutput.taskDefinition.revision
// NOTE(review): CLUSTER and SERVICE are not defined anywhere in this
// file — presumably global environment variables or redacted values;
// verify they resolve before relying on this step.
sh "aws ecs update-service --cluster ${CLUSTER} --service ${SERVICE} --task-definition ${FAMILY}:${revision}"
}
}
// Notify Slack about the outcome of the dev deployment.
post {
failure {
slackSend color: 'danger',
message: ":siren: Failed to deploy a new task definition for REDACTED in dev :siren: - ${env.RUN_DISPLAY_URL}"
}
success {
slackSend message: ":thinking_face: A new REDACTED task definition is ready to deploy to staging: ${env.RUN_DISPLAY_URL}" }
}
}
}
post {
always {
/*
* Just to be safe we'll always clean up our generated JSON
* files
*/
sh 'rm -f *.json'
}
}
}
// vim: ft=groovy

View File

@@ -2,8 +2,8 @@ extern crate pest;
#[macro_use]
extern crate pest_derive;
use pest::Parser;
use pest::error::Error as PestError;
use pest::Parser;
use std::path::PathBuf;
#[derive(Parser)]
@@ -33,18 +33,18 @@ mod tests {
#[test]
fn parse_string_single() {
let _str = PipelineParser::parse(
Rule::string,
r#"'hello world'"#)
.unwrap().next().unwrap();
let _str = PipelineParser::parse(Rule::string, r#"'hello world'"#)
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_string_double() {
let _str = PipelineParser::parse(
Rule::string,
r#""hello world""#)
.unwrap().next().unwrap();
let _str = PipelineParser::parse(Rule::string, r#""hello world""#)
.unwrap()
.next()
.unwrap();
}
#[test]
@@ -63,7 +63,8 @@ pipeline {
}
}
}
"#)
"#,
)
.expect("Failed to parse")
.next()
.expect("Failed to iterate");
@@ -71,26 +72,29 @@ pipeline {
#[test]
fn parse_no_options() {
let _options = PipelineParser::parse(
Rule::optionsDecl,
"options { }")
.unwrap().next().unwrap();
let _options = PipelineParser::parse(Rule::optionsDecl, "options { }")
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_options_no_args() {
let _options = PipelineParser::parse(
Rule::optionsDecl,
"options { timestamps() }")
.unwrap().next().unwrap();
let _options = PipelineParser::parse(Rule::optionsDecl, "options { timestamps() }")
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_options_kwargs() {
let _options = PipelineParser::parse(
Rule::optionsDecl,
"options { timeout(time: 4, unit: 'HOURS') }")
.unwrap().next().unwrap();
"options { timeout(time: 4, unit: 'HOURS') }",
)
.unwrap()
.next()
.unwrap();
}
/*
@@ -102,16 +106,30 @@ pipeline {
fn parse_options_nested_func() {
let _options = PipelineParser::parse(
Rule::optionsDecl,
"options { buildDiscarder(logRotator(daysToKeepStr: '10')) }")
.unwrap().next().unwrap();
"options { buildDiscarder(logRotator(daysToKeepStr: '10')) }",
)
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_options_optional_parens() {
let _options = PipelineParser::parse(
Rule::optionsDecl,
"options { buildDiscarder logRotator(daysToKeepStr: '10') }",
)
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_triggers() {
let _t = PipelineParser::parse(
Rule::triggersDecl,
"triggers { pollSCM('H * * * *') }")
.unwrap().next().unwrap();
let _t = PipelineParser::parse(Rule::triggersDecl, "triggers { pollSCM('H * * * *') }")
.unwrap()
.next()
.unwrap();
}
#[test]
@@ -120,16 +138,19 @@ pipeline {
Rule::environmentDecl,
r#"environment {
DISABLE_PROXY_CACHE = 'true'
}"#)
.unwrap().next().unwrap();
}"#,
)
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_block_steps() {
let _s = PipelineParser::parse(
Rule::step,
"dir('foo') { sh 'make' }")
.unwrap().next().unwrap();
let _s = PipelineParser::parse(Rule::step, "dir('foo') { sh 'make' }")
.unwrap()
.next()
.unwrap();
}
#[test]
@@ -145,13 +166,15 @@ pipeline {
extensions: [
[name: "refs/heads/${env.BRANCH_NAME}"],
],
])"#)
.unwrap().next().unwrap();
])"#,
)
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_not_exactly_declarative_is_it_step() {
let _s = PipelineParser::parse(
Rule::step,
r#"checkout([
@@ -161,8 +184,63 @@ pipeline {
url: scm.userRemoteConfigs[0].url
]
],
])"#)
.unwrap().next().unwrap();
])"#,
)
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_script_step() {
let _s = PipelineParser::parse(
Rule::stepsDecl,
r#"steps {
script {
def taskOutput = readJSON file: 'task-output.dev.json'
def revision = taskOutput.taskDefinition.revision
sh "aws ecs update-service --cluster ${CLUSTER} --service ${SERVICE} --task-definition ${FAMILY}:${revision}"
}
}"#)
.unwrap().next().unwrap();
}
/*
* I put a step in your step so you can step while you step
*/
#[test]
fn parse_sup_dawg_heard_you_liked_steps() {
let _s = PipelineParser::parse(
Rule::stepsDecl,
r#"steps {
sh 'rm -f task-definition.*.json'
writeJSON(file: 'task-definition.dev.json',
json: readYaml(text: readFile('deploy/task-definition.yml')))
}"#,
)
.unwrap()
.next()
.unwrap();
}
#[test]
fn parse_abusive_chaining_of_groovy_on_steps() {
let _s = PipelineParser::parse(
Rule::stepsDecl,
r#"steps {
sh 'rm -f task-definition.*.json'
writeJSON(file: 'task-definition.dev.json',
json: readYaml(text: readFile('deploy/task-definition.yml')
.replaceAll('@@IMAGE@@', params.IMAGE)
.replaceAll('@@FAMILY@@', params.FAMILY)))
sh 'echo DEV task definition:'
sh 'cat task-definition.dev.json'
}"#,
)
.unwrap()
.next()
.unwrap();
}
}

View File

@@ -1,4 +1,3 @@
use jdp::*;
use gumdrop::Options;
@@ -25,7 +24,7 @@ enum Command {
struct CheckOpts {
#[options(help = "print help message")]
help: bool,
#[options(free, required, help="Path to a Jenkinsfile")]
#[options(free, required, help = "Path to a Jenkinsfile")]
file: std::path::PathBuf,
}
@@ -46,10 +45,9 @@ fn main() {
if result.is_err() {
println!("Failed to parse!: {:#?}", result);
std::process::exit(1);
}
else {
} else {
println!("Looks valid! Great work!");
}
},
}
}
}

View File

@@ -13,6 +13,7 @@ pipeline = _{ SOI ~ shebang? ~
agentDecl
| environmentDecl
| optionsDecl
| parametersDecl
| postDecl
| toolsDecl
| triggersDecl
@@ -92,10 +93,10 @@ envProperty = {
}
credentialProperty = { IDENT ~
"=" ~
"credentials(" ~ string ~ ")"
"credentials" ~ opening_paren ~ string ~ closing_paren
}
func = { IDENT ~ "(" ~ (func | kwargs | args)? ~ ")" }
func = { IDENT ~ opening_paren? ~ (kwargs | args | func)? ~ closing_paren? }
inputDecl = { "input" ~
opening_brace ~
@@ -116,6 +117,13 @@ parallelDecl = { "parallel" ~
closing_brace
}
parametersDecl = { "parameters" ~
opening_brace ~
// Not exactly a step, but the syntax looks the same to me!
(step)* ~
closing_brace
}
postDecl = { "post" ~
opening_brace ~
(postBlock+) ~
@@ -134,6 +142,16 @@ postBlock = {
| "cleanup"
) ~ opening_brace ~ (step)+ ~ closing_brace }
scriptStep = { "script" ~ opening_brace ~ groovy ~ closing_brace }
groovy = {
(
// Handle nested structures
(opening_brace ~ groovy ~ closing_brace)? ~
!(closing_brace) ~
ANY
)*
}
stagesDecl = { "stages" ~
opening_brace ~
(stage)+ ~
@@ -154,7 +172,7 @@ stage = { "stage(" ~ string ~ ")" ~
stepsDecl = { "steps" ~
opening_brace ~
(step)+ ~
(step | scriptStep)+ ~
closing_brace
}
@@ -162,7 +180,10 @@ stepsDecl = { "steps" ~
step_args = _{ args | kwargs | array_args | map_args }
// A simple step can be invoked with parenthesis or without
simple_step = _{ IDENT ~
((opening_paren ~ step_args ~ closing_paren) | step_args)
((opening_paren ~ step_args ~ closing_paren) | step_args) ~
// This allows for really gross abusive chaining of Groovy
// functionality off the end of a step invocation
("." ~ simple_step)*
}
// A block step is something like dir('foo') { } that accepts steps within it
// in many cases these will have keyword arguments like withCredentials
@@ -174,7 +195,7 @@ block_step = _{ IDENT ~
}
step = { block_step | simple_step }
arg = _{ string | BOOL | number | magic_vars | array_args | map_args }
arg = _{ string | BOOL | number | magic_vars | array_args | map_args | step }
args = { (arg ~ COMMA?)+ }
kwargs = _{ (kwarg ~ COMMA?)+ }
kwarg = { IDENT~ ":" ~ arg }
@@ -187,9 +208,10 @@ property = { IDENT ~ "=" ~ string }
// These are magic variables that are legitimate to use in "declarative"
// pipeline
magic_vars = _{ envRef | scmRef }
magic_vars = _{ envRef | scmRef | paramsRef }
envRef = { "env." ~ nestedRef }
scmRef = { "scm." ~ nestedRef }
paramsRef = { "params." ~ nestedRef }
// Just parse anything until the whitespace I guess
nestedRef = _{ (IDENT ~ (opening_brack ~ (number) ~ closing_brack)? ~ "."?)+ }

View File

@@ -1,25 +1,23 @@
/*
* This test module will test everything in test_data/valid
*/
use jdp::*;
use glob::glob;
use jdp::*;
fn test_glob(pattern: &str, can_parse: bool) {
for entry in glob(pattern).expect("Failed to read glob pattern") {
match entry {
Ok(path) => {
match path.file_name() {
Some(name) => {
if name == "Jenkinsfile" {
let result = parse_file(&path);
if can_parse && result.is_err() {
println!("{:?}", result);
}
assert_eq!(can_parse, result.is_ok(), "Parsing file failed {:?}", path);
Ok(path) => match path.file_name() {
Some(name) => {
if name == "Jenkinsfile" {
let result = parse_file(&path);
if can_parse && result.is_err() {
println!("{:?}", result);
}
},
_ => {},
assert_eq!(can_parse, result.is_ok(), "Parsing file failed {:?}", path);
}
}
_ => {}
},
Err(e) => println!("{:?}", e),
}