Merge pull request #5 from michaelmccaskill/CBN-360

Check for data updates every 12 hours
Michael McCaskill 2016-10-11 12:30:30 -04:00 committed by GitHub
commit 34589e34d7
13 changed files with 311 additions and 40 deletions

Jenkinsfile-generate (new file, 16 lines)

@@ -0,0 +1,16 @@
#!groovy
node {
stage 'Checkout'
echo "Checking out branch ${env.BRANCH_NAME}"
checkout scm
stage 'Generate Plugin Data'
withEnv(["PATH+MAVEN=${tool 'M3'}/bin"]) {
sh "mvn -P generatePluginData"
}
stage 'Upload Plugin Data'
// TODO
}

README.adoc

@@ -26,7 +26,7 @@ of the application.
== Run Local Plugin Site API
----
mvn jetty:run
DATA_FILE_URL="http://url.to/plugins.json.gzip" mvn jetty:run
----
This will launch an embedded Jetty container accessible at `http://localhost:8080`.
@@ -35,7 +35,7 @@ This will launch an embedded Jetty container accessible at `http://localhost:808
----
docker build -t jenkinsciinfra/plugin-site-api .
docker run -p 8080:8080 -it jenkinsciinfra/plugin-site-api
docker run -p 8080:8080 -it -e DATA_FILE_URL="http://url.to/plugins.json.gzip" jenkinsciinfra/plugin-site-api
----
== Rebuild Elasticsearch data
@@ -44,8 +44,8 @@ docker run -p 8080:8080 -it jenkinsciinfra/plugin-site-api
mvn -P generatePluginData
----
This will generate a new file in `src/main/resources/elasticsearch/data/plugins.json.gzip`
consisting of plugin information and installation statistics.
This will generate a new file in `target/plugins.json.gzip` consisting of plugin information and installation
statistics. This file should be uploaded to the location that `DATA_FILE_URL` points to.
== REST API Reference

pom.xml

@@ -49,6 +49,11 @@
<artifactId>guava</artifactId>
<version>19.0</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.4</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>

GeneratePluginData.java

@@ -253,9 +253,9 @@ public class GeneratePluginData {
}
private void writePluginsToFile(List<Plugin> plugins) {
final File data = Paths.get(System.getProperty("user.dir"), "src", "main", "resources", "elasticsearch", "data", "plugins.json.gzip").toFile();
final File data = Paths.get(System.getProperty("user.dir"), "target", "plugins.json.gzip").toFile();
try(final Writer writer = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(data)), "utf-8"))) {
JsonObjectMapper.getObjectMapper().writeValue(writer, plugins);
JsonObjectMapper.getObjectMapper().writeValue(writer, new GeneratedPluginData(plugins));
} catch (Exception e) {
logger.error("Problem writing plugin data to file", e);
throw new RuntimeException(e);
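To sanity-check the generated artifact locally, something like the following hypothetical helper (not part of this PR) reads `target/plugins.json.gzip` back the same way the new `DefaultConfigurationService` further down does:
----
import com.fasterxml.jackson.databind.ObjectMapper;
import io.jenkins.plugins.models.GeneratedPluginData;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.file.Paths;
import java.util.zip.GZIPInputStream;

public class InspectPluginData {
  public static void main(String[] args) throws Exception {
    final File data = Paths.get(System.getProperty("user.dir"), "target", "plugins.json.gzip").toFile();
    try (final Reader reader = new InputStreamReader(new GZIPInputStream(new FileInputStream(data)), "utf-8")) {
      // The @JsonSerialize/@JsonDeserialize annotations on GeneratedPluginData
      // mean a plain ObjectMapper can handle the LocalDateTime field.
      final GeneratedPluginData parsed = new ObjectMapper().readValue(reader, GeneratedPluginData.class);
      System.out.printf("%d plugins, generated at %s%n", parsed.getPlugins().size(), parsed.getCreatedAt());
    }
  }
}
----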

src/main/java/io/jenkins/plugins/datastore/EmbeddedElasticsearchServer.java

@@ -1,7 +1,8 @@
package io.jenkins.plugins.datastore;
import io.jenkins.plugins.commons.JsonObjectMapper;
import io.jenkins.plugins.models.Plugin;
import io.jenkins.plugins.models.GeneratedPluginData;
import io.jenkins.plugins.services.ConfigurationService;
import org.apache.commons.io.FileUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.bulk.BulkItemResponse;
@@ -12,18 +13,20 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
import org.json.JSONArray;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.io.*;
import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.stream.Collectors;
import java.util.zip.GZIPInputStream;
import java.util.Optional;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
* Copied and modified from:
@@ -33,6 +36,11 @@ public class EmbeddedElasticsearchServer {
private final Logger logger = LoggerFactory.getLogger(EmbeddedElasticsearchServer.class);
private static final DateTimeFormatter TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("yyyy.MM.dd_HH.mm.ss");
private static final String ALIAS = "plugins";
private static final String INDEX_PREFIX = "plugins_";
private static final String TYPE = "plugins";
private File tempDir;
private Node node;
@@ -40,6 +48,12 @@ public class EmbeddedElasticsearchServer {
return node.client();
}
@Inject
private ConfigurationService configurationService;
@Inject
private ScheduledExecutorService scheduledExecutorService;
@PostConstruct
public void postConstruct() {
logger.info("Initialize elasticsearch");
@@ -55,7 +69,8 @@ public class EmbeddedElasticsearchServer {
.build();
node = NodeBuilder.nodeBuilder().local(true).settings(settings).build();
node.start();
createAndPopulateIndex();
populateIndex();
scheduledExecutorService.scheduleWithFixedDelay(() -> populateIndex(), 12, 12, TimeUnit.HOURS);
logger.info("Initializing elasticsearch done");
}
@@ -67,52 +82,93 @@ public class EmbeddedElasticsearchServer {
FileUtils.deleteQuietly(tempDir);
}
private void createAndPopulateIndex() {
private void populateIndex() {
try {
final GeneratedPluginData data = configurationService.getIndexData();
doPopulateIndex(data);
} catch (Exception e) {
logger.error("Problem populating index", e);
throw new RuntimeException("Problem populating index", e);
}
}
private void doPopulateIndex(GeneratedPluginData data) {
final Optional<LocalDateTime> optCreatedAt = getCurrentCreatedAt();
if (optCreatedAt.isPresent()) {
final LocalDateTime createdAt = optCreatedAt.get();
final LocalDateTime generatedCreatedAt = LocalDateTime.parse(TIMESTAMP_FORMATTER.format(data.getCreatedAt()), TIMESTAMP_FORMATTER);
logger.info("Current timestamp - " + createdAt);
logger.info("Data timestamp - " + generatedCreatedAt);
if (createdAt.equals(generatedCreatedAt) || createdAt.isAfter(generatedCreatedAt)) {
logger.info("Plugin data is already up to date");
return;
}
}
final ClassLoader cl = getClass().getClassLoader();
final String index = String.format("plugins_%s", DateTimeFormatter.ofPattern("yyyy.mm.dd_HH.mm.ss").format(LocalDateTime.now()));
final String index = String.format("%s%s", INDEX_PREFIX, TIMESTAMP_FORMATTER.format(data.getCreatedAt()));
try {
final File mappingFile = new File(cl.getResource("elasticsearch/mappings/plugins.json").getFile());
final String mappingContent = FileUtils.readFileToString(mappingFile, "utf-8");
final Client client = getClient();
client.admin().indices().prepareCreate(index)
.addMapping("plugins", mappingContent)
.addMapping(TYPE, mappingContent)
.get();
logger.info(String.format("Index '%s' created", index));
final File dataFile = new File(cl.getResource("elasticsearch/data/plugins.json.gzip").getFile());
final String data = readGzipFile(dataFile);
final JSONArray json = new JSONArray(data);
final BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
for (int i = 0; i < json.length(); i++) {
// Seems redundant but it's actually a good test to ensure the generation process is working. If we can read
// a plugin from the JSON then it's good.
final Plugin plugin = JsonObjectMapper.getObjectMapper().readValue(json.getJSONObject(i).toString(), Plugin.class);
final IndexRequest indexRequest = client.prepareIndex(index, "plugins", plugin.getName())
.setSource(JsonObjectMapper.getObjectMapper().writeValueAsString(plugin)).request();
bulkRequestBuilder.add(indexRequest);
}
data.getPlugins().forEach((plugin) -> {
try {
final IndexRequest indexRequest = client.prepareIndex(index, TYPE, plugin.getName())
.setSource(JsonObjectMapper.getObjectMapper().writeValueAsString(plugin)).request();
bulkRequestBuilder.add(indexRequest);
} catch (Exception e) {
throw new RuntimeException(e);
}
});
final BulkResponse response = bulkRequestBuilder.get();
if (response.hasFailures()) {
for (BulkItemResponse item : response.getItems()) {
logger.warn("Problem indexing: " + item.getFailureMessage());
logger.warn(String.format("Problem indexing: %s", item.getFailureMessage()));
}
throw new ElasticsearchException("Problem bulk indexing");
}
logger.info(String.format("Indexed %d plugins", json.length()));
client.admin().indices().prepareAliases().addAlias(index, "plugins").get();
client.admin().indices().prepareRefresh("plugins").execute().get();
logger.info(String.format("Alias plugins points to index %s", index));
logger.info(String.format("Indexed %d plugins", data.getPlugins().size()));
if (client.admin().indices().prepareAliasesExist(ALIAS).get().exists()) {
final String oldIndex = client.admin().indices().prepareGetAliases(ALIAS).get().getAliases().iterator().next().key;
// Atomic swap of alias
client.admin().indices().prepareAliases()
.removeAlias(oldIndex, ALIAS)
.addAlias(index, ALIAS)
.get();
logger.info(String.format("Updated alias '%s' from '%s' to '%s'", ALIAS, oldIndex, index));
client.admin().indices().prepareDelete(oldIndex).get();
logger.info(String.format("Deleted old index '%s'", oldIndex));
} else {
client.admin().indices().prepareAliases()
.addAlias(index, ALIAS)
.get();
logger.info(String.format("Alias (%s) plugins points to index %s", ALIAS, index));
}
client.admin().indices().prepareRefresh(ALIAS).execute().get();
} catch (Exception e) {
logger.error("Problem creating and populating index", e);
throw new RuntimeException("Problem creating and populating index", e);
logger.error("Problem indexing", e);
throw new RuntimeException("Problem indexing", e);
}
}
private String readGzipFile(final File file) {
try(final BufferedReader reader = new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(file)), "utf-8"))) {
return reader.lines().collect(Collectors.joining());
} catch (Exception e) {
logger.error("Problem decompressing plugin data", e);
throw new RuntimeException("Problem decompressing plugin data", e);
private Optional<LocalDateTime> getCurrentCreatedAt() {
final Client client = getClient();
if (client.admin().indices().prepareAliasesExist(ALIAS).get().exists()) {
final String index = client.admin().indices().prepareGetAliases(ALIAS).get().getAliases().iterator().next().key;
final String timestamp = index.substring(INDEX_PREFIX.length());
try {
return Optional.of(LocalDateTime.parse(timestamp, TIMESTAMP_FORMATTER));
} catch (Exception e) {
logger.error("Problem parsing timestamp from index", e);
return Optional.empty();
}
} else {
logger.info("Alias doesn't exist");
return Optional.empty();
}
}
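A note on the timestamp comparison in doPopulateIndex() above: both sides of the equals/isAfter check are reduced to the second precision that an index name like `plugins_2016.10.11_12.30.30` can carry. A minimal standalone sketch of that round-trip (hypothetical class name, made-up values):
----
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class TimestampRoundTrip {

  // Same pattern as EmbeddedElasticsearchServer.TIMESTAMP_FORMATTER
  private static final DateTimeFormatter TIMESTAMP_FORMATTER =
    DateTimeFormatter.ofPattern("yyyy.MM.dd_HH.mm.ss");

  public static void main(String[] args) {
    final LocalDateTime createdAt = LocalDateTime.of(2016, 10, 11, 12, 30, 30, 123_000_000);
    // Formatting drops the nanoseconds, so parsing the result back yields a
    // second-precision value comparable to one recovered from an index name.
    final LocalDateTime truncated =
      LocalDateTime.parse(TIMESTAMP_FORMATTER.format(createdAt), TIMESTAMP_FORMATTER);
    System.out.println(createdAt);                    // 2016-10-11T12:30:30.123
    System.out.println(truncated);                    // 2016-10-11T12:30:30
    System.out.println(createdAt.isAfter(truncated)); // true: nanos were dropped
  }
}
----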

src/main/java/io/jenkins/plugins/models/GeneratedPluginData.java

@@ -0,0 +1,50 @@
package io.jenkins.plugins.models;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateTimeDeserializer;
import com.fasterxml.jackson.datatype.jsr310.ser.LocalDateTimeSerializer;
import java.time.LocalDateTime;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public class GeneratedPluginData {
@JsonProperty("plugins")
private List<Plugin> plugins;
@JsonProperty("createdAt")
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SS'Z'")
@JsonSerialize(using = LocalDateTimeSerializer.class)
@JsonDeserialize(using = LocalDateTimeDeserializer.class)
private LocalDateTime createdAt;
public GeneratedPluginData() {
}
public GeneratedPluginData(List<Plugin> plugins) {
this.plugins = plugins;
this.createdAt = LocalDateTime.now();
}
public List<Plugin> getPlugins() {
return plugins;
}
public void setPlugins(List<Plugin> plugins) {
this.plugins = plugins;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
}
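A hypothetical round-trip check (not part of the PR) of the JSON shape this class produces; because the serializer and deserializer are pinned by annotation, a plain `ObjectMapper` should work without registering the JSR-310 module (jackson-datatype-jsr310 still has to be on the classpath for the annotation types):
----
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collections;

public class GeneratedPluginDataRoundTrip {
  public static void main(String[] args) throws Exception {
    final ObjectMapper mapper = new ObjectMapper();
    final GeneratedPluginData out = new GeneratedPluginData(Collections.emptyList());
    final String json = mapper.writeValueAsString(out);
    System.out.println(json); // e.g. {"plugins":[],"createdAt":"2016-10-11T12:30:30.12Z"}
    final GeneratedPluginData in = mapper.readValue(json, GeneratedPluginData.class);
    System.out.println(in.getCreatedAt()); // second side of the round trip
  }
}
----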

src/main/java/io/jenkins/plugins/services/Binder.java

@@ -1,5 +1,6 @@
package io.jenkins.plugins.services;
import io.jenkins.plugins.services.impl.DefaultConfigurationService;
import io.jenkins.plugins.services.impl.ElasticsearchDatastoreService;
import io.jenkins.plugins.services.impl.HttpClientWikiService;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
@@ -11,6 +12,7 @@ import javax.inject.Singleton;
*
* <p>Binds</p>
* <ul>
* <li><code>DefaultConfigurationService</code> to <code>ConfigurationService</code> as a <code>Singleton</code></li>
* <li><code>ElasticsearchDatastoreService</code> to <code>DatastoreService</code> as a <code>Singleton</code></li>
* <li><code>HttpClientWikiService</code> to <code>WikiService</code> as a <code>Singleton</code></li>
* </ul>
@@ -22,6 +24,7 @@ public class Binder extends AbstractBinder {
@Override
protected void configure() {
bind(DefaultConfigurationService.class).to(ConfigurationService.class).in(Singleton.class);
bind(ElasticsearchDatastoreService.class).to(DatastoreService.class).in(Singleton.class);
bind(HttpClientWikiService.class).to(WikiService.class).in(Singleton.class);
}
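For context, a minimal sketch of how the new binding is consumed (hypothetical bootstrap class; the integration test further down wires the same services by hand):
----
import io.jenkins.plugins.services.ConfigurationService;
import org.glassfish.hk2.api.ServiceLocator;
import org.glassfish.hk2.utilities.ServiceLocatorUtilities;

public class Bootstrap {
  public static void main(String[] args) {
    // Apply everything declared in Binder.configure(); the first getService()
    // call creates the DefaultConfigurationService singleton.
    final ServiceLocator locator = ServiceLocatorUtilities.bind(new io.jenkins.plugins.services.Binder());
    final ConfigurationService config = locator.getService(ConfigurationService.class);
    System.out.println(config.getClass().getSimpleName()); // DefaultConfigurationService
  }
}
----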

src/main/java/io/jenkins/plugins/services/ConfigurationService.java

@@ -0,0 +1,18 @@
package io.jenkins.plugins.services;
import io.jenkins.plugins.models.GeneratedPluginData;
/**
* <p>Get various configuration pieces for the application</p>
*/
public interface ConfigurationService {
/**
* <p>Get the index data needed to populate Elasticsearch</p>
*
* @return GeneratedPluginData
* @throws ServiceException in case something goes wrong
*/
GeneratedPluginData getIndexData() throws ServiceException;
}

src/main/java/io/jenkins/plugins/services/impl/DefaultConfigurationService.java

@@ -0,0 +1,84 @@
package io.jenkins.plugins.services.impl;
import io.jenkins.plugins.commons.JsonObjectMapper;
import io.jenkins.plugins.models.GeneratedPluginData;
import io.jenkins.plugins.services.ConfigurationService;
import io.jenkins.plugins.services.ServiceException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.util.stream.Collectors;
import java.util.zip.GZIPInputStream;
/**
* <p>Default implementation of <code>ConfigurationService</code></p>
*/
public class DefaultConfigurationService implements ConfigurationService {
private final Logger logger = LoggerFactory.getLogger(DefaultConfigurationService.class);
@Override
public GeneratedPluginData getIndexData() throws ServiceException {
final CloseableHttpClient httpClient = HttpClients.createDefault();
try {
final String url = getDataFileUrl();
final HttpGet get = new HttpGet(url);
final CloseableHttpResponse response = httpClient.execute(get);
if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
final HttpEntity entity = response.getEntity();
final InputStream inputStream = entity.getContent();
final File dataFile = File.createTempFile("plugins", ".json.gzip");
FileUtils.copyToFile(inputStream, dataFile);
final String data = readGzipFile(dataFile);
return JsonObjectMapper.getObjectMapper().readValue(data, GeneratedPluginData.class);
} else {
logger.error("Data file not found");
throw new RuntimeException("Data file not found");
}
} catch (Exception e) {
logger.error("Problem getting data file", e);
throw new ServiceException("Problem getting data file", e);
} finally {
try {
httpClient.close();
} catch (IOException e) {
logger.warn("Problem closing HttpClient", e);
}
}
}
private String getDataFileUrl() {
if (System.getenv().containsKey("DATA_FILE_URL")) {
final String url = StringUtils.trimToNull(System.getenv("DATA_FILE_URL"));
if (url == null) {
throw new RuntimeException("Environment variable 'DATA_FILE_URL' is empty");
}
return url;
} else {
final String url = StringUtils.trimToNull(System.getProperty("data.file.url"));
if (url == null) {
throw new RuntimeException("System property 'data.file.url' is not given");
}
return url;
}
}
private String readGzipFile(final File file) {
try(final BufferedReader reader = new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(file)), "utf-8"))) {
return reader.lines().collect(Collectors.joining());
} catch (Exception e) {
logger.error("Problem decompressing plugin data", e);
throw new RuntimeException("Problem decompressing plugin data", e);
}
}
}
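A minimal local-run sketch (hypothetical class and placeholder URL; assumes `DATA_FILE_URL` is absent from the environment, so the `data.file.url` system-property fallback is exercised):
----
import io.jenkins.plugins.models.GeneratedPluginData;
import io.jenkins.plugins.services.impl.DefaultConfigurationService;

public class FetchIndexData {
  public static void main(String[] args) throws Exception {
    // Mirrors DATA_FILE_URL="http://url.to/plugins.json.gzip" from the README,
    // but via the system-property fallback.
    System.setProperty("data.file.url", "http://url.to/plugins.json.gzip");
    final GeneratedPluginData data = new DefaultConfigurationService().getIndexData();
    System.out.printf("Fetched %d plugins generated at %s%n",
      data.getPlugins().size(), data.getCreatedAt());
  }
}
----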

src/test/java/io/jenkins/plugins/services/DatastoreServiceIntegrationTest.java

@@ -1,13 +1,17 @@
package io.jenkins.plugins.services;
import io.jenkins.plugins.models.*;
import io.jenkins.plugins.services.impl.ElasticsearchDatastoreService;
import io.jenkins.plugins.services.impl.HttpClientWikiService;
import org.glassfish.hk2.api.ServiceLocator;
import org.glassfish.hk2.utilities.ServiceLocatorUtilities;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import javax.inject.Singleton;
import java.util.Arrays;
import java.util.Collections;
@@ -22,7 +26,14 @@ public class DatastoreServiceIntegrationTest {
public static void setUp() throws Exception {
locator = ServiceLocatorUtilities.bind(
new io.jenkins.plugins.datastore.Binder(),
new io.jenkins.plugins.services.Binder());
new AbstractBinder() {
@Override
protected void configure() {
bind(MockConfigurationService.class).to(ConfigurationService.class).in(Singleton.class);
bind(ElasticsearchDatastoreService.class).to(DatastoreService.class).in(Singleton.class);
bind(HttpClientWikiService.class).to(WikiService.class).in(Singleton.class);
}
});
datastoreService = locator.getService(DatastoreService.class);
}

src/test/java/io/jenkins/plugins/services/MockConfigurationService.java

@@ -0,0 +1,27 @@
package io.jenkins.plugins.services;
import io.jenkins.plugins.commons.JsonObjectMapper;
import io.jenkins.plugins.models.GeneratedPluginData;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
public class MockConfigurationService implements ConfigurationService {
private final Logger logger = LoggerFactory.getLogger(MockConfigurationService.class);
@Override
public GeneratedPluginData getIndexData() throws ServiceException {
try {
logger.info("Using test plugin data");
final ClassLoader cl = getClass().getClassLoader();
final File dataFile = new File(cl.getResource("plugins.json").getFile());
final String data = FileUtils.readFileToString(dataFile, "utf-8");
return JsonObjectMapper.getObjectMapper().readValue(data, GeneratedPluginData.class);
} catch (Exception e) {
throw new RuntimeException("Can't get test plugin data");
}
}
}

File diff suppressed because one or more lines are too long