Move the grammar into its own directory

Now with more real tests
This commit is contained in:
R Tyler Croy 2019-07-06 18:37:03 -07:00
parent 665532da25
commit 7ed36d55b3
No known key found for this signature in database
GPG Key ID: E5C92681BEF6CEA2
9 changed files with 158 additions and 94 deletions

View File

@ -8,51 +8,36 @@
# NOTE(review): this span is a commit-diff rendering without +/- markers; the
# pre-change and post-change versions of several targets (build, check, clean,
# depends, parser) both appear below. Only one version of each existed in the
# real Makefile — confirm against the repository before editing this span.
# Set the PATH so we can automatically include our node binstubs
export PATH:=./node_modules/.bin:${PATH}
ANTLR_BIN=antlr-4.7.2-complete.jar
ANTLR=contrib/$(ANTLR_BIN)
GRAMMAR=Otto.g4 OttoLexer.g4
SUB_DIRS=grammar
################################################################################
## Phony targets
all: help
# NOTE(review): old (pre-move) build/check — grammar compiled in-tree.
build: depends ## Build all components
tsc
check: depends build ## Run validation tests
#dredd
node parse-test.js
swagger: depends ## Generate the swagger stubs based on apispecs
depends: prereqs $(ANTLR) ## Download all dependencies
prereqs: scripts/prereqs.sh ## Check that this system has the necessary tools to build otto
@sh scripts/prereqs.sh
clean: ## Clean all temporary/working files
rm -f $(ANTLR)
# NOTE(review): old parser target — this logic moved into grammar/Makefile.
parser: depends $(GRAMMAR) ## Generate the parser code
@for target in Java JavaScript; do \
java -cp $(ANTLR) org.antlr.v4.Tool \
-Dlanguage=$$target \
-o build/parser/$$target \
$(GRAMMAR); \
echo "--> Generated $$target stubs"; \
done;
################################################################################
## Non-phony targets
$(ANTLR): ## Download the latest ANTLR4 binary
(cd contrib && wget https://www.antlr.org/download/$(ANTLR_BIN))
################################################################################
# Cute hack thanks to:
# https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
help: ## Display this help text
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
# NOTE(review): new (post-move) targets — build/check/clean now recurse into
# each directory in SUB_DIRS so grammar compilation happens in grammar/.
build: depends ## Build all components
$(foreach dir, $(SUB_DIRS), $(MAKE) -C $(dir) $@)
tsc
check: depends build ## Run validation tests
jest
#dredd
node parse-test.js
swagger: depends ## Generate the swagger stubs based on apispecs
depends: prereqs ## Download all dependencies
prereqs: scripts/prereqs.sh ## Check that this system has the necessary tools to build otto
@sh scripts/prereqs.sh
clean: ## Clean all temporary/working files
$(foreach dir, $(SUB_DIRS), $(MAKE) -C $(dir) $@)
################################################################################
.PHONY: all build check clean depends parser

39
grammar/Makefile Normal file
View File

@ -0,0 +1,39 @@
# List of .g4 files to compile
GRAMMAR=Otto.g4 OttoLexer.g4
# Target languages for the grammars
LANGS=JavaScript Java Cpp Go
# ANTLR binary for execution
ANTLR_BIN=antlr-4.7.2-complete.jar
ANTLR=../contrib/$(ANTLR_BIN)
################################################################################
## Phony targets
################################################################################
# Cute hack thanks to:
# https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
help: ## Display this help text
	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
build: $(GRAMMAR) $(ANTLR) ## Compile the grammars into their language stubs
	@for target in $(LANGS); do \
		java -cp $(ANTLR) org.antlr.v4.Tool \
			-Dlanguage=$$target \
			-o build/parser/$$target \
			$(GRAMMAR); \
		echo "--> Generated $$target stubs"; \
	done;
clean: ## Clean all temporary/working files
	rm -rf build
	rm -f $(ANTLR)
# build and clean MUST be phony: the build target's output lands in a
# directory literally named `build`, so without .PHONY make would consider
# the target up to date as soon as that directory exists and skip it.
.PHONY: help build clean
################################################################################
$(ANTLR): ## Download the latest ANTLR4 binary
	wget -O $(ANTLR) https://www.antlr.org/download/$(ANTLR_BIN)

6
grammar/README.adoc Normal file
View File

@ -0,0 +1,6 @@
= Otto Grammars
This directory contains the link:https://github.com/antlr/antlr4/[ANTLR v4]
grammars for parsing the modeling language that Otto uses for describing a
continuous delivery process.

82
grammar/__tests__/use.ts Normal file
View File

@ -0,0 +1,82 @@
/*
* This test file will verify the parsing behavior of the use block
*/
import antlr from 'antlr4';
import { ErrorListener } from 'antlr4/error/ErrorListener';
import { OttoLexer } from '../build/parser/JavaScript/OttoLexer';
import { OttoListener } from '../build/parser/JavaScript/OttoListener';
import { Otto } from '../build/parser/JavaScript/Otto';
// Smallest syntactically-valid pipeline body; appended to each `use` snippet
// below so the parser always sees a complete top-level `pipeline` rule.
const MIN_PIPELINE = 'pipeline { stages { stage { } } }'
/*
 * Minimal parse-tree visitor: walks every node and collects the text of
 * leaf tokens into nested arrays (one array level per tree level).
 * Returns undefined for a null context or a node with no children.
 */
class Visitor {
    visitChildren(ctx) {
        if (!ctx || !ctx.children) {
            return;
        }
        return ctx.children.map((node) => {
            const isBranch = node.children && node.children.length != 0;
            return isBranch ? node.accept(this) : node.getText();
        });
    }
}
/*
 * ANTLR error listener that records every syntax error it is notified of,
 * so tests can assert on how many (and which) parse failures occurred.
 */
class JestListener extends ErrorListener {
    public errors: Array<any> = [];

    syntaxError(recognizer, offendingSymbol, line, column, msg, e) {
        // Capture just the fields the tests care about.
        this.errors.push({ line, column, error: e, message: msg });
    }
}
describe('use {}', () => {
    /*
     * Parse `buffer` with the generated Otto lexer/parser and return the
     * list of syntax errors captured during the parse (empty on success).
     */
    const parser = (buffer) => {
        const input = new antlr.InputStream(buffer);
        const lexer = new OttoLexer(input);
        const tokenStream = new antlr.CommonTokenStream(lexer);
        const ottoParser = new Otto(tokenStream);
        ottoParser.buildParseTrees = true;
        // Swap the default console listener for one we can assert against.
        ottoParser.removeErrorListeners();
        const errorListener = new JestListener();
        ottoParser.addErrorListener(errorListener);
        const tree = ottoParser.pipeline();
        tree.accept(new Visitor());
        return errorListener.errors;
    };

    it('should fail on an empty string', () => {
        expect(parser('')).toHaveLength(1);
    });
    it('should pass on an empty use block', () => {
        expect(parser(`use {} ${MIN_PIPELINE}`)).toHaveLength(0);
    });
    it('should pass on a use with stdlib', () => {
        expect(parser(`use { stdlib } ${MIN_PIPELINE}`)).toHaveLength(0);
    });
    it('should fail on a use with another symbol', () => {
        expect(parser(`use { koopa } ${MIN_PIPELINE}`)).toHaveLength(1);
    });
    it('should pass with a string', () => {
        expect(parser(`use { 'some/path' } ${MIN_PIPELINE}`)).toHaveLength(0);
    });
});

4
jest.config.js Normal file
View File

@ -0,0 +1,4 @@
// Jest configuration: compile TypeScript tests through ts-jest and run them
// in a plain Node environment (no browser/jsdom globals).
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
};

View File

@ -1,55 +0,0 @@
const fs = require('fs');
const antlr = require('antlr4');
const Lexer = require('./build/parser/JavaScript/OttoLexer').OttoLexer;
const Parser = require('./build/parser/JavaScript/Otto').Otto;
const OttoListener = require('./build/parser/JavaScript/OttoListener').OttoListener;
/*
 * Debug parse-tree visitor: recursively gathers the text of leaf tokens
 * into nested arrays, logging when invoked without a context.
 */
class Visitor {
    visitChildren(ctx) {
        if (!ctx) {
            console.log('noctx');
            return;
        }
        if (!ctx.children) {
            return;
        }
        return ctx.children.map((node) => {
            const isLeaf = !node.children || node.children.length == 0;
            return isLeaf ? node.getText() : node.accept(this);
        });
    }
}
/*
 * Listener that logs entry into a few grammar rules; used to eyeball which
 * parse events fire while walking the example pipelines.
 */
class L extends OttoListener {
    enterPipeline(_ctx) {
        console.log('entering pipeline');
    }

    enterConfigure_block(_ctx) {
        console.log('enter config');
    }

    enterUse_block(_ctx) {
        console.log('enter use');
    }
}
/*
 * Smoke-test driver: lex/parse each example pipeline and walk the resulting
 * tree with the logging listener. Side effects only (file reads + stdout),
 * so forEach is used rather than map.
 */
[
    './examples/webapp.otto',
    './examples/matrix.otto',
].forEach((filename) => {
    // Interpolate the file actually being parsed (the literal was garbled).
    console.log(`Processing ${filename}`);
    const input = fs.readFileSync(filename, 'utf8');
    const chars = new antlr.InputStream(input);
    const lexer = new Lexer(chars);
    const tokens = new antlr.CommonTokenStream(lexer);
    const parser = new Parser(tokens);
    parser.buildParseTrees = true;
    const tree = parser.pipeline();
    antlr.tree.ParseTreeWalker.DEFAULT.walk(new L(), tree);
});

View File

@ -25,5 +25,8 @@
"typeRoots" : [
"./node_modules/@types"
]
}
},
"exclude": [
"**/__tests__/*.ts"
]
}