Travis can't deal with the sudo command in install. If sudo is present,
the build falls back to the old infrastructure and the tests fail because
of that. So:
  - Remove the sudo steps and, with them, MySQL 5.6.x
  - To make the cukes pass without 5.6, remove the timestamp precision
    in the DDL and truncate timestamps to whole seconds during model
    creation and in the cuke verification steps (a sketch of the idea
    follows the commit metadata below)
  - fixes #127
Giri Dandu 2015-04-09 16:38:26 -04:00
parent a8981f7a8a
commit a75daf1df2
9 changed files with 16 additions and 28 deletions
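For context: MySQL 5.5's TIMESTAMP column stores whole seconds only (fractional-second precision only arrived in MySQL 5.6.4), so the Java side zeroes milliseconds as well to keep the expected and stored values comparable. A minimal Joda-Time sketch of the idea, illustrative only and not part of this commit:

    import org.joda.time.DateTime

    // A value with milliseconds does not survive a round trip through a
    // MySQL 5.5 TIMESTAMP column, which keeps whole seconds only.
    DateTime now = DateTime.now()                   // e.g. 2015-04-09T16:38:26.123
    DateTime truncated = now.withMillisOfSecond(0)  // e.g. 2015-04-09T16:38:26.000

    // Comparing the truncated value against what the database returns avoids
    // spurious millisecond mismatches in the cuke assertions.
    assert truncated.millisOfSecond == 0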

View File

@@ -20,17 +20,3 @@ env:
 before_install:
 - mysql -e 'create database deploydb;'
 - psql -c 'create database travis_ci_test;' -U postgres
-install:
-- "sudo apt-get remove mysql-common mysql-server-5.5 mysql-server-core-5.5 mysql-client-5.5 mysql-client-core-5.5"
-- "sudo apt-get autoremove"
-- "sudo apt-get install libaio1"
-- "wget -O mysql-5.6.14.deb http://dev.mysql.com/get/Downloads/MySQL-5.6/mysql-5.6.14-debian6.0-x86_64.deb/from/http://cdn.mysql.com/"
-- "sudo dpkg -i mysql-5.6.14.deb"
-- "sudo cp /opt/mysql/server-5.6/support-files/mysql.server /etc/init.d/mysql.server"
-- "sudo ln -s /opt/mysql/server-5.6/bin/* /usr/bin/"
-# some config values were changed since 5.5
-- "sudo sed -i'' 's/table_cache/table_open_cache/' /etc/mysql/my.cnf"
-- "sudo sed -i'' 's/log_slow_queries/slow_query_log/' /etc/mysql/my.cnf"
-- "sudo sed -i'' 's/basedir[^=]\\+=.*$/basedir = \\/opt\\/mysql\\/server-5.6/' /etc/mysql/my.cnf"
-- "sudo /etc/init.d/mysql.server start"

View File

@@ -81,10 +81,12 @@ Then(~/^the response body should be:$/) { String expectedBody ->
 }
 Then(~/^the body should be JSON:$/) { String expectedBody ->
     ObjectMapper mapper = new ObjectMapper()
     String body = response.readEntity(String.class)
     templateVariables = [
-        'created_timestamp' : DateTime.now(),
+        'created_timestamp' : DateTime.now().withMillisOfSecond(0),
     ]
     expectedBody = processTemplate(expectedBody, templateVariables)

View File

@@ -125,7 +125,7 @@ Then(~/^the webhook should be invoked with the JSON:$/) { String expectedMessage
     String requestMessageBody = requestWebhookObject.getRequestMessageBody()
     templateVariables = [
-        'created_timestamp' : DateTime.now(),
+        'created_timestamp' : DateTime.now().withMillisOfSecond(0),
     ]
     expectedMessageBody = processTemplate(expectedMessageBody, templateVariables)
@@ -144,7 +144,7 @@ Then(~/^the webhook ([1-9][0-9]*) should be invoked with the JSON:$/) { int webh
     String requestMessageBody = requestWebhookObject.getRequestMessageBodies()[webhookNumber -1]
     templateVariables = [
-        'created_timestamp' : DateTime.now(),
+        'created_timestamp' : DateTime.now().withMillisOfSecond(0),
     ]
     expectedMessageBody = processTemplate(expectedMessageBody, templateVariables)
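processTemplate itself is not shown in this diff; assuming it wraps something like Groovy's SimpleTemplateEngine (an assumption, not confirmed by this commit), the substitution of the truncated timestamp into the expected body would look roughly like this:

    import groovy.text.SimpleTemplateEngine
    import org.joda.time.DateTime

    // Hypothetical stand-in for the project's processTemplate helper:
    // renders ${created_timestamp} placeholders in the expected body.
    def processTemplate(String text, Map variables) {
        new SimpleTemplateEngine().createTemplate(text).make(variables).toString()
    }

    def expected = '{"createdAt": "${created_timestamp}"}'
    def rendered = processTemplate(expected, [created_timestamp: DateTime.now().withMillisOfSecond(0)])
    println rendered   // timestamp rendered with milliseconds zeroed, matching what MySQL 5.5 stores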

View File

@@ -46,8 +46,8 @@ class V2__artifacts_versions extends DeployDBMigration {
 ADD COLUMN (
     version VARCHAR(255) NOT NULL,
     sourceUrl TEXT,
-    createdAt TIMESTAMP(3),
-    deletedAt TIMESTAMP(3) NULL
+    createdAt TIMESTAMP,
+    deletedAt TIMESTAMP NULL
 );
 """
 }

View File

@@ -53,8 +53,8 @@ class V4__create_deployments_table extends DeployDBMigration {
     environment VARCHAR(8192) NOT NULL,
     status INT NOT NULL,
-    createdAt TIMESTAMP(3),
-    deletedAt TIMESTAMP(3) NULL,
+    createdAt TIMESTAMP,
+    deletedAt TIMESTAMP NULL,
     PRIMARY KEY (id)
 );

View File

@@ -51,8 +51,8 @@ class V5__create_flows_table extends DeployDBMigration {
     artifactId BIGINT NOT NULL,
     service TEXT NOT NULL,
-    createdAt TIMESTAMP(3),
-    deletedAt TIMESTAMP(3) NULL,
+    createdAt TIMESTAMP,
+    deletedAt TIMESTAMP NULL,
     PRIMARY KEY (id)
 );

View File

@@ -57,8 +57,8 @@ class V6__create_promotion_results_table extends DeployDBMigration {
     infoUrl TEXT,
     deploymentId BIGINT NOT NULL,
-    createdAt TIMESTAMP(3),
-    deletedAt TIMESTAMP(3) NULL,
+    createdAt TIMESTAMP,
+    deletedAt TIMESTAMP NULL,
     PRIMARY KEY (id),
     FOREIGN KEY (deploymentId) REFERENCES deployments(id)

View File

@@ -51,8 +51,8 @@ class V8__create_configs_table extends DeployDBMigration {
     contents TEXT NOT NULL,
     ident VARCHAR(8192),
     modelType INT NOT NULL,
-    createdAt TIMESTAMP(3),
-    deletedAt TIMESTAMP(3) NULL,
+    createdAt TIMESTAMP,
+    deletedAt TIMESTAMP NULL,
     PRIMARY KEY (id)
 );
 """

View File

@@ -16,7 +16,7 @@ abstract class AbstractModel {
     Long id

     @Column(name='createdAt', nullable=false)
-    protected DateTime createdAt = DateTime.now()
+    protected DateTime createdAt = DateTime.now().withMillisOfSecond(0)

     @Column(name='deletedAt')
     protected DateTime deletedAt