Refactor the handling of TreeCache into the AbstractTreeWatcher itself

This should lay the groundwork for refactoring much of the BrokerTreeWatcher up
into the AbstractTreeWatcher
R. Tyler Croy 2015-01-28 01:44:53 -08:00
parent 0706895af1
commit af19abfacb
5 changed files with 35 additions and 8 deletions
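As a rough illustration of the groundwork mentioned in the commit message (not part of this diff): with the TreeCache lifecycle owned by AbstractTreeWatcher, BrokerTreeWatcher could eventually collapse to something like the sketch below. The '/brokers/ids' path, the empty consumers map, and the method bodies are assumptions for illustration only.

import org.apache.curator.framework.CuratorFramework
import org.apache.curator.framework.recipes.cache.ChildData

// Hypothetical sketch, not code from this commit: BrokerTreeWatcher rebased
// onto the new AbstractTreeWatcher, which now owns the TreeCache lifecycle.
class BrokerTreeWatcher extends AbstractTreeWatcher {
    // Kafka registers live brokers under this znode path (assumed here)
    private static final String BROKERS_PATH = '/brokers/ids'

    BrokerTreeWatcher(CuratorFramework client) {
        super(client, [:])
    }

    String zookeeperPath() {
        return BROKERS_PATH
    }

    ConsumerOffset processChildData(ChildData data) {
        // Broker znodes hold JSON metadata rather than consumer offsets, so a
        // real implementation would parse broker host/port here instead
        return null
    }
}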

Main.groovy

@@ -55,10 +55,9 @@ class Main {
         client.start()
-        TreeCache cache = new TreeCache(client, '/consumers')
         KafkaPoller poller = setupKafkaPoller(consumers, statsd, cli.hasOption('n'))
         BrokerTreeWatcher brokerWatcher = new BrokerTreeWatcher(client)
-        StandardTreeWatcher consumerWatcher = new StandardTreeWatcher(consumers)
+        StandardTreeWatcher consumerWatcher = new StandardTreeWatcher(client, consumers)
         consumerWatcher.onInitComplete << {
@@ -69,10 +68,8 @@ class Main {
             poller.refresh(brokers)
         }
-        cache.listenable.addListener(consumerWatcher)
         brokerWatcher.start()
-        cache.start()
+        consumerWatcher.start()
         logger.info("Started wait loop...")

AbstractTreeWatcher.groovy

@@ -7,6 +7,7 @@ import groovy.transform.TypeChecked
 import org.apache.curator.framework.CuratorFramework
 import org.apache.curator.framework.recipes.cache.ChildData
+import org.apache.curator.framework.recipes.cache.TreeCache
 import org.apache.curator.framework.recipes.cache.TreeCacheListener
 import org.apache.curator.framework.recipes.cache.TreeCacheEvent
 import org.slf4j.Logger
@@ -23,11 +24,17 @@ abstract class AbstractTreeWatcher implements TreeCacheListener {
     protected AbstractMap<TopicPartition, List<ConsumerOffset>> consumersMap
     protected List<Closure> onInitComplete
     protected Logger logger
+    protected CuratorFramework client
+    protected TreeCache cache

-    AbstractTreeWatcher(AbstractMap consumers) {
+    AbstractTreeWatcher(CuratorFramework client, AbstractMap consumers) {
+        this.client = client
         this.consumersMap = consumers
         this.onInitComplete = []
         this.logger = LoggerFactory.getLogger(this.class)
+        this.cache = new TreeCache(client, zookeeperPath())
+        this.cache.listenable.addListener(this)
     }

     /**
@@ -35,6 +42,19 @@ abstract class AbstractTreeWatcher implements TreeCacheListener {
      */
     abstract ConsumerOffset processChildData(ChildData data)

+    /**
+     * Return the String of the path in Zookeeper this class should watch. This
+     * method must be safe to call from the initializer of the class
+     */
+    abstract String zookeeperPath()
+
+    /**
+     * Start our internal cache
+     */
+    void start() {
+        this.cache?.start()
+    }
+
     /**
      * Primary TreeCache event processing callback
      */
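With the cache built and wired in the base-class constructor, a concrete watcher now only supplies the ZooKeeper path and the per-node processing. A minimal sketch of the new contract and its caller-side wiring follows; DemoWatcher and the '/demo' path are made-up names for illustration, not code from this commit.

// Illustrative only; DemoWatcher and '/demo' are hypothetical
class DemoWatcher extends AbstractTreeWatcher {
    DemoWatcher(CuratorFramework client, AbstractMap consumers) {
        super(client, consumers)  // base class creates the TreeCache and registers `this` as its listener
    }

    String zookeeperPath() { return '/demo' }

    ConsumerOffset processChildData(ChildData data) { return null }
}

// Caller-side wiring, mirroring the Main changes above:
DemoWatcher watcher = new DemoWatcher(client, consumers)
watcher.start()  // starts the internal TreeCache; a safe no-op if the cache is null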

StandardTreeWatcher.groovy

@@ -11,6 +11,11 @@ import org.apache.curator.framework.recipes.cache.ChildData
 @TypeChecked
 @InheritConstructors
 class StandardTreeWatcher extends AbstractTreeWatcher {
+    private static final String ZK_PATH = '/consumers'
+
+    String zookeeperPath() {
+        return ZK_PATH
+    }
     /**
      * Extract the necessary information from a standard (i.e. high-level Kafka

AbstractTreeWatcherSpec.groovy

@@ -11,9 +11,11 @@ class AbstractTreeWatcherSpec extends Specification {
     class MockWatcher extends AbstractTreeWatcher {
         MockWatcher() {
-            super([:])
+            super(null, [:])
         }

         ConsumerOffset processChildData(ChildData d) { }
+        String zookeeperPath() { return '/zk/spock' }
     }

     def setup() {

StandardTreeWatcherSpec.groovy

@@ -2,13 +2,16 @@ package com.github.lookout.verspaetung.zk
 import spock.lang.*

+import org.apache.curator.framework.CuratorFramework
 import org.apache.curator.framework.recipes.cache.ChildData

 class StandardTreeWatcherSpec extends Specification {
     private StandardTreeWatcher watcher
+    private CuratorFramework mockCurator

     def setup() {
-        this.watcher = new StandardTreeWatcher([:])
+        this.mockCurator = Mock(CuratorFramework)
+        this.watcher = new StandardTreeWatcher(this.mockCurator, [:])
     }

     def "processChildData should return null if null is given"() {