From fc2ac689d1ca425fbd6664d79a3188414f7f6b74 Mon Sep 17 00:00:00 2001
From: Anton Lindgren
Date: Tue, 25 Oct 2016 15:59:08 +0200
Subject: [PATCH 01/23] More verbose (error) logging

---
 src/main/java/Client.java | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/main/java/Client.java b/src/main/java/Client.java
index 91a9e30..4246e39 100644
--- a/src/main/java/Client.java
+++ b/src/main/java/Client.java
@@ -21,9 +21,10 @@ public class Client {
     final static String zookeeperConnect = System.getenv("ZOOKEEPER_CONNECT");
 
     public static void main(String[] args) throws Exception {
-        if (topicName.length() < 1) {
-            throw new Exception("Missing environment variable 'TOPIC_NAME'!");
-        }
+        if (topicName.length() < 1) throw new Exception("Missing environment variable 'TOPIC_NAME'!");
+        if (zookeeperConnect.length() < 1) throw new Exception("Missing environment variable 'ZOOKEEPER_CONNECT'");
+
+        System.out.println("Connecting to zookeeper using address '" + zookeeperConnect + "'");
 
         final int sessionTimeoutMs = 10 * 1000;
         final int connectionTimeoutMs = 8 * 1000;
@@ -66,6 +67,7 @@ private static void tryCreate(ZkUtils zkUtils, String topicName, int nRetriesLef
         try {
             AdminUtils.createTopic(zkUtils, topicName, partitions, replication, topicConfig);
         } catch (Exception e) {
+            System.err.println("Topic create failed due to " + e.toString());
             if (nRetriesLeft <= 0) {
                 throw new RuntimeException("Failed to create topic \"" + topicName + "\". Is Kafka and Zookeeper running?");
             } else {
@@ -74,6 +76,8 @@ private static void tryCreate(ZkUtils zkUtils, String topicName, int nRetriesLef
                 tryCreate(zkUtils, topicName, nRetriesLeft - 1);
             }
         }
+
+        System.out.println("Successfully created topic '" + topicName + "'");
 
     }
 }
\ No newline at end of file

From 0a686986939a97fbf33b4cecf07dc87268859993 Mon Sep 17 00:00:00 2001
From: Anton Lindgren
Date: Tue, 25 Oct 2016 15:59:23 +0200
Subject: [PATCH 02/23] Add build-target so we can build internally

---
 build-contracts/docker-compose-create.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/build-contracts/docker-compose-create.yml b/build-contracts/docker-compose-create.yml
index 52809cb..7d7e50b 100644
--- a/build-contracts/docker-compose-create.yml
+++ b/build-contracts/docker-compose-create.yml
@@ -8,8 +8,10 @@ services:
       - zookeeper
   client:
     build: ../
+    image: localhost:5000/yolean/kafka-topic-client:$PUSH_TAG
    labels:
      com.yolean.build-contract: ""
+      com.yolean.build-target: ""
    links:
      - zookeeper
  test:

From 6fad336d6e7edcfb80563af08439c08c817cc595 Mon Sep 17 00:00:00 2001
From: Staffan Olsson
Date: Mon, 27 Nov 2017 18:52:02 +0100
Subject: [PATCH 03/23] wip convert to gradle build, Dockerfile is todo

---
 .editorconfig                           |  7 +++
 .gitignore                              |  5 ++
 Dockerfile                              |  1 -
 Dockerfile.dev                          | 15 -----
 Dockerfile.prod                         | 30 ----------
 build.gradle                            | 41 +++++++++++++
 kafka-topic-client.iml                  | 34 -----------
 maven-docker-build-settings.xml         |  6 --
 pom.xml                                 | 57 ------------------
 .../kafka/topic/client/cli}/Client.java |  1 +
 10 files changed, 54 insertions(+), 143 deletions(-)
 create mode 100644 .editorconfig
 delete mode 120000 Dockerfile
 delete mode 100644 Dockerfile.dev
 delete mode 100644 Dockerfile.prod
 create mode 100644 build.gradle
 delete mode 100644 kafka-topic-client.iml
 delete mode 100644 maven-docker-build-settings.xml
 delete mode 100644 pom.xml
 rename src/main/java/{ => se/yolean/kafka/topic/client/cli}/Client.java (98%)

diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..fa3df9b
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,7
@@ +root = true + +[*] +charset = utf-8 +trim_trailing_whitespace = true +indent_style = space +indent_size = 2 diff --git a/.gitignore b/.gitignore index c2e6efc..bde3668 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,8 @@ .idea target/ node_modules/ +/bin/ +.classpath +.gradle +.project +.settings diff --git a/Dockerfile b/Dockerfile deleted file mode 120000 index 4fc9ef5..0000000 --- a/Dockerfile +++ /dev/null @@ -1 +0,0 @@ -Dockerfile.prod \ No newline at end of file diff --git a/Dockerfile.dev b/Dockerfile.dev deleted file mode 100644 index 4fa908b..0000000 --- a/Dockerfile.dev +++ /dev/null @@ -1,15 +0,0 @@ -FROM maven:3.3.9-jdk-8 - -WORKDIR /usr/src/app - -COPY target/kafka-topic-client-1.0-SNAPSHOT-jar-with-dependencies.jar kafka-topic-client.jar - -ENV ZOOKEEPER_CONNECT "zookeeper:2181" -ENV TOPIC_NAME "build-contract-test" -ENV RESET_TOPIC false -ENV NUM_PARTITIONS 1 -ENV NUM_REPLICAS 1 -ENV NUM_CREATE_RETRIES 5 - -ENTRYPOINT ["java", "-jar", "kafka-topic-client.jar"] - diff --git a/Dockerfile.prod b/Dockerfile.prod deleted file mode 100644 index 6804142..0000000 --- a/Dockerfile.prod +++ /dev/null @@ -1,30 +0,0 @@ -FROM maven:3.3.9-jdk-8 - -WORKDIR /usr/src/app - -COPY maven-docker-build-settings.xml $MAVEN_CONFIG/settings.xml - -COPY pom.xml . - -RUN mkdir -p src/main/java src/test/java - -RUN mvn package - -COPY src src - -RUN mvn package - -RUN cp target/kafka-topic-client-1.0-SNAPSHOT-jar-with-dependencies.jar kafka-topic-client.jar - -# This cleanup will probably not reduce image size as the layers have already been produced -RUN mvn clean && rm -Rf /m2-build-repository && rm $MAVEN_CONFIG/settings.xml - -ENV ZOOKEEPER_CONNECT "zookeeper:2181" -ENV TOPIC_NAME "build-contract-test" -ENV RESET_TOPIC false -ENV NUM_PARTITIONS 1 -ENV NUM_REPLICAS 1 -ENV NUM_CREATE_RETRIES 5 - -ENTRYPOINT ["java", "-jar", "kafka-topic-client.jar"] - diff --git a/build.gradle b/build.gradle new file mode 100644 index 0000000..6b2fb67 --- /dev/null +++ b/build.gradle @@ -0,0 +1,41 @@ +repositories { + jcenter() +} + +apply plugin: 'java' +apply plugin: "idea" +apply plugin: "eclipse" +apply plugin: "maven" +apply plugin: "jacoco" + +group 'se.yolean' +sourceCompatibility = 1.8 + +dependencies { + compile group: 'org.apache.kafka', name: 'kafka_2.12', version: '1.0.0' + compile group: 'com.101tec', name: 'zkclient', version: '0.10' + + compile group: 'io.prometheus', name: 'simpleclient', version: '0.1.0' + compile group: 'io.prometheus', name: 'simpleclient_httpserver', version: '0.1.0' + + testCompile group: 'junit', name: 'junit', version: '4.12' + testCompile group: 'org.mockito', name: 'mockito-core', version: '2.12.0' +} + +task copyToLib(type: Copy) { + into "$buildDir/libs" + from configurations.runtime +} + +build.dependsOn(copyToLib) + +// for .editorconfig support in Eclipse +buildscript { + repositories { + jcenter() + } + dependencies { + classpath 'org.standardout:gradle-eclipseconfig:1.1.0' + } +} +apply plugin: 'org.standardout.eclipseconfig' diff --git a/kafka-topic-client.iml b/kafka-topic-client.iml deleted file mode 100644 index 1f63cc9..0000000 --- a/kafka-topic-client.iml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/maven-docker-build-settings.xml b/maven-docker-build-settings.xml deleted file mode 100644 index 4b756bb..0000000 --- a/maven-docker-build-settings.xml +++ /dev/null @@ -1,6 +0,0 @@ - - /m2-build-repository - diff --git a/pom.xml b/pom.xml deleted file mode 100644 
index d7302f3..0000000 --- a/pom.xml +++ /dev/null @@ -1,57 +0,0 @@ - - - 4.0.0 - - yolean - kafka-topic-client - 1.0-SNAPSHOT - - - 1.8 - 1.8 - - - - - - org.apache.kafka - kafka_2.12 - 1.0.0 - - - com.101tec - zkclient - 0.10 - - - - - - - - maven-assembly-plugin - - - - Client - - - - jar-with-dependencies - - - - - make-assembly - package - - single - - - - - - - diff --git a/src/main/java/Client.java b/src/main/java/se/yolean/kafka/topic/client/cli/Client.java similarity index 98% rename from src/main/java/Client.java rename to src/main/java/se/yolean/kafka/topic/client/cli/Client.java index 6fb9ee4..18b8215 100644 --- a/src/main/java/Client.java +++ b/src/main/java/se/yolean/kafka/topic/client/cli/Client.java @@ -1,3 +1,4 @@ +package se.yolean.kafka.topic.client.cli; import kafka.admin.AdminOperationException; import org.I0Itec.zkclient.ZkClient; import org.I0Itec.zkclient.ZkConnection; From 71212dbdf649db2e31495aed3ef09d280e9587bf Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Mon, 27 Nov 2017 21:21:33 +0100 Subject: [PATCH 04/23] Adds dockerfile, with quite ok caching --- .dockerignore | 10 ++++++++++ .gitignore | 3 ++- Dockerfile | 39 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 .dockerignore create mode 100644 Dockerfile diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..8751b74 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,10 @@ +.idea +target/ +node_modules/ +/bin/ +/build/ +.classpath +.gradle +.project +.settings +.git diff --git a/.gitignore b/.gitignore index bde3668..847f0d4 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,8 @@ target/ node_modules/ /bin/ -.classpath +/build/ +.classpath .gradle .project .settings diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..79eabe2 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,39 @@ +FROM solsson/kafka-jre@sha256:06dabfc8cacd0687c8f52c52afd650444fb6d4a8e0b85f68557e6e7a5c71667c \ + as build + +ENV GRADLE_VERSION=4.3.1 + +RUN set -ex; \ + export DEBIAN_FRONTEND=noninteractive; \ + runDeps='curl'; \ + buildDeps='ca-certificates unzip'; \ + apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ + \ + cd /opt; \ + curl -SLs -o gradle-$GRADLE_VERSION-bin.zip https://services.gradle.org/distributions/gradle-$GRADLE_VERSION-bin.zip; \ + unzip gradle-$GRADLE_VERSION-bin.zip; \ + rm gradle-$GRADLE_VERSION-bin.zip; \ + ln -s /opt/gradle-$GRADLE_VERSION/bin/gradle /usr/local/bin/gradle; \ + gradle -v + +WORKDIR /opt/src/kafka-topic-client +COPY build.gradle ./ + +RUN set -ex; \ + mkdir -p src/main/java; \ + echo "public class Dummy {}" > src/main/java/Dummy.java; \ + gradle build; \ + rm src/main/java/Dummy.java + +COPY . . 
+ +RUN set -ex; \ + gradle build + +FROM solsson/kafka-jre@sha256:06dabfc8cacd0687c8f52c52afd650444fb6d4a8e0b85f68557e6e7a5c71667c + +COPY --from=build /opt/src/kafka-topic-client/build/libs /usr/share/java/kafka-topic-client + +ENTRYPOINT [ "java", \ + "-cp", "/usr/share/java/kafka-topic-client/*:/etc/kafka-topic-client/*", \ + "se.yolean.kafka.topic.client.cli.Client" ] From e11637172998f083a15551fd745cb631b427eb97 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Mon, 27 Nov 2017 21:23:24 +0100 Subject: [PATCH 05/23] Adds a couple of dependencies we'll need for the service --- build.gradle | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/build.gradle b/build.gradle index 6b2fb67..e301675 100644 --- a/build.gradle +++ b/build.gradle @@ -1,5 +1,9 @@ repositories { + mavenCentral() jcenter() + maven { + url "http://packages.confluent.io/maven/" + } } apply plugin: 'java' @@ -9,12 +13,25 @@ apply plugin: "maven" apply plugin: "jacoco" group 'se.yolean' + +apply plugin: 'application' +mainClassName = 'se.yolean.kafka.topic.client.cli.Client' + sourceCompatibility = 1.8 dependencies { + compile group: 'javax.inject', name: 'javax.inject', version: '1' + compile group: 'com.google.inject', name: 'guice', version: '4.1.0' + + compile group: 'org.apache.kafka', name: 'kafka-clients', version: '1.0.0' + compile group: 'org.apache.kafka', name: 'kafka_2.12', version: '1.0.0' compile group: 'com.101tec', name: 'zkclient', version: '0.10' + runtime group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.25' + compile group: 'structlog4j', name: 'structlog4j-api', version: '1.0.0' + compile group: 'structlog4j', name: 'structlog4j-json', version: '1.0.0' + compile group: 'io.prometheus', name: 'simpleclient', version: '0.1.0' compile group: 'io.prometheus', name: 'simpleclient_httpserver', version: '0.1.0' From 21ee78fa7acd17495a10d77bbc1ab1c25dda55c7 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Tue, 28 Nov 2017 07:43:54 +0100 Subject: [PATCH 06/23] We're going to define the topic declaration schema at start --- build.gradle | 2 ++ 1 file changed, 2 insertions(+) diff --git a/build.gradle b/build.gradle index e301675..ca2e68d 100644 --- a/build.gradle +++ b/build.gradle @@ -28,6 +28,8 @@ dependencies { compile group: 'org.apache.kafka', name: 'kafka_2.12', version: '1.0.0' compile group: 'com.101tec', name: 'zkclient', version: '0.10' + compile group: 'io.confluent', name: 'kafka-schema-registry-client', version: '4.0.0' + runtime group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.25' compile group: 'structlog4j', name: 'structlog4j-api', version: '1.0.0' compile group: 'structlog4j', name: 'structlog4j-json', version: '1.0.0' From bf60ed34d19ed91ee040149ba95815ac07f8adb3 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Tue, 28 Nov 2017 09:03:35 +0100 Subject: [PATCH 07/23] Adapted from Schema Registry's topic setup https://github.com/confluentinc/schema-registry/blob/master/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaStore.java --- .../service/TopicDeclarationsTopicCheck.java | 155 ++++++++++++++++++ 1 file changed, 155 insertions(+) create mode 100644 src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java diff --git a/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java b/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java new file mode 100644 index 0000000..6c60d7b --- /dev/null +++ 
b/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java @@ -0,0 +1,155 @@ +package se.yolean.kafka.topic.client.service; + +import java.util.Collections; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import javax.inject.Inject; +import javax.inject.Named; + +import org.apache.kafka.clients.admin.Config; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.clients.admin.TopicDescription; +import org.apache.kafka.common.config.ConfigResource; +import org.apache.kafka.common.config.TopicConfig; +import org.apache.kafka.common.errors.TopicExistsException; +import org.apache.kafka.clients.admin.AdminClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TopicDeclarationsTopicCheck { + + private static final Logger log = LoggerFactory.getLogger(TopicDeclarationsTopicCheck.class); + + @Inject + @Named("admin") + private Properties props; + + @Inject + @Named("config:adminInitTimeoutMs") + private int initTimeout; + + @Inject + @Named("config:adminTopic") + private String topic; + + @Inject + @Named("config:adminTopicDesiredReplicationFactor") + private int desiredReplicationFactor; + + void createOrVerifySchemaTopic() throws StoreInitializationException { + + try (AdminClient admin = AdminClient.create(props)) { + // + Set allTopics = admin.listTopics().names().get(initTimeout, TimeUnit.MILLISECONDS); + if (allTopics.contains(topic)) { + verifySchemaTopic(admin); + } else { + createSchemaTopic(admin); + } + } catch (TimeoutException e) { + throw new StoreInitializationException( + "Timed out trying to create or validate schema topic configuration", + e + ); + } catch (InterruptedException | ExecutionException e) { + throw new StoreInitializationException( + "Failed trying to create or validate schema topic configuration", + e + ); + } + } + + private void createSchemaTopic(AdminClient admin) throws StoreInitializationException, + InterruptedException, + ExecutionException, + TimeoutException { + log.info("Creating schemas topic {}", topic); + + int numLiveBrokers = admin.describeCluster().nodes() + .get(initTimeout, TimeUnit.MILLISECONDS).size(); + if (numLiveBrokers <= 0) { + throw new StoreInitializationException("No live Kafka brokers"); + } + + int schemaTopicReplicationFactor = Math.min(numLiveBrokers, desiredReplicationFactor); + if (schemaTopicReplicationFactor < desiredReplicationFactor) { + log.warn("Creating the schema topic " + + topic + + " using a replication factor of " + + schemaTopicReplicationFactor + + ", which is less than the desired one of " + + desiredReplicationFactor + ". If this is a production environment, it's " + + "crucial to add more brokers and increase the replication factor of the topic."); + } + + NewTopic schemaTopicRequest = new NewTopic(topic, 1, (short) schemaTopicReplicationFactor); + schemaTopicRequest.configs( + Collections.singletonMap( + TopicConfig.CLEANUP_POLICY_CONFIG, + TopicConfig.CLEANUP_POLICY_COMPACT + ) + ); + try { + admin.createTopics(Collections.singleton(schemaTopicRequest)).all() + .get(initTimeout, TimeUnit.MILLISECONDS); + } catch (ExecutionException e) { + if (e.getCause() instanceof TopicExistsException) { + // This is ok. 
+ } else { + throw e; + } + } + } + + private void verifySchemaTopic(AdminClient admin) throws StoreInitializationException, + InterruptedException, + ExecutionException, + TimeoutException { + log.info("Validating schemas topic {}", topic); + + Set topics = Collections.singleton(topic); + Map topicDescription = admin.describeTopics(topics) + .all().get(initTimeout, TimeUnit.MILLISECONDS); + + TopicDescription description = topicDescription.get(topic); + final int numPartitions = description.partitions().size(); + if (numPartitions != 1) { + throw new StoreInitializationException("The schema topic " + topic + " should have only 1 " + + "partition but has " + numPartitions); + } + + if (description.partitions().get(0).replicas().size() < desiredReplicationFactor) { + log.warn("The replication factor of the schema topic " + + topic + + " is less than the desired one of " + + desiredReplicationFactor + + ". If this is a production environment, it's crucial to add more brokers and " + + "increase the replication factor of the topic."); + } + + ConfigResource topicResource = new ConfigResource(ConfigResource.Type.TOPIC, topic); + + Map configs = + admin.describeConfigs(Collections.singleton(topicResource)).all() + .get(initTimeout, TimeUnit.MILLISECONDS); + Config topicConfigs = configs.get(topicResource); + String retentionPolicy = topicConfigs.get(TopicConfig.CLEANUP_POLICY_CONFIG).value(); + if (retentionPolicy == null || !TopicConfig.CLEANUP_POLICY_COMPACT.equals(retentionPolicy)) { + log.error("The retention policy of the schema topic " + topic + " is incorrect. " + + "You must configure the topic to 'compact' cleanup policy to avoid Kafka " + + "deleting your schemas after a week. " + + "Refer to Kafka documentation for more details on cleanup policies"); + + throw new StoreInitializationException("The retention policy of the schema topic " + topic + + " is incorrect. 
Expected cleanup.policy to be " + + "'compact' but it is " + retentionPolicy); + + } + } + +} From b467f5ad90a785b9a4ee8c10e186abd48a95e009 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Tue, 28 Nov 2017 09:05:16 +0100 Subject: [PATCH 08/23] wip --- .../TopicDeclarationsTopicCheckTest.java | 47 +++++++++++++++++++ .../service/AdminClientPropsProvider.java | 36 ++++++++++++++ .../client/service/SchemaRegistryClient.java | 5 ++ .../service/StoreInitializationException.java | 29 ++++++++++++ 4 files changed, 117 insertions(+) create mode 100644 src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/service/SchemaRegistryClient.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/service/StoreInitializationException.java diff --git a/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java b/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java new file mode 100644 index 0000000..e0a16da --- /dev/null +++ b/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java @@ -0,0 +1,47 @@ +package se.yolean.kafka.topic.client.service; + +import static org.junit.Assert.*; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.google.inject.AbstractModule; +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.sun.javafx.scene.control.Properties; + +public class TopicDeclarationsTopicCheckTest { + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + } + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void test() throws Exception { + Injector injector = Guice.createInjector(new AbstractModule() { + @Override + protected void configure() { + bind(Properties.class).toInstance(new Properties()); + //bind(TopicDeclarationsTopicCheck.class); + } + }); + //TopicDeclarationsTopicCheck check = injector.getInstance(TopicDeclarationsTopicCheck.class); + //check.createOrVerifySchemaTopic(); + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java b/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java new file mode 100644 index 0000000..a2d7cc2 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java @@ -0,0 +1,36 @@ +package se.yolean.kafka.topic.client.service; + +import java.util.Properties; + +import javax.inject.Inject; +import javax.inject.Named; +import javax.inject.Provider; + +public class AdminClientPropsProvider implements Provider { + + private String bootstrap; + private String acks; + + @Inject + public AdminClientPropsProvider(@Named("config:bootstrap") String bootstrap, + @Named("config:acks") String acks) { + this.bootstrap = bootstrap; + this.acks = acks; + } + + @Override + public Properties get() { + // https://kafka.apache.org/0110/javadoc/index.html?org/apache/kafka/clients/producer/KafkaProducer.html + Properties props = new Properties(); + props.put("bootstrap.servers", bootstrap); + props.put("acks", acks); + props.put("retries", 0); + props.put("batch.size", 
16384); + props.put("linger.ms", 1); + props.put("buffer.memory", 33554432); + props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + return props; + } + +} \ No newline at end of file diff --git a/src/main/java/se/yolean/kafka/topic/client/service/SchemaRegistryClient.java b/src/main/java/se/yolean/kafka/topic/client/service/SchemaRegistryClient.java new file mode 100644 index 0000000..30d567a --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/service/SchemaRegistryClient.java @@ -0,0 +1,5 @@ +package se.yolean.kafka.topic.client.service; + +public class SchemaRegistryClient { + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/service/StoreInitializationException.java b/src/main/java/se/yolean/kafka/topic/client/service/StoreInitializationException.java new file mode 100644 index 0000000..6a8c862 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/service/StoreInitializationException.java @@ -0,0 +1,29 @@ +package se.yolean.kafka.topic.client.service; + +public class StoreInitializationException extends Exception { + + public StoreInitializationException() { + // TODO Auto-generated constructor stub + } + + public StoreInitializationException(String arg0) { + super(arg0); + // TODO Auto-generated constructor stub + } + + public StoreInitializationException(Throwable arg0) { + super(arg0); + // TODO Auto-generated constructor stub + } + + public StoreInitializationException(String arg0, Throwable arg1) { + super(arg0, arg1); + // TODO Auto-generated constructor stub + } + + public StoreInitializationException(String arg0, Throwable arg1, boolean arg2, boolean arg3) { + super(arg0, arg1, arg2, arg3); + // TODO Auto-generated constructor stub + } + +} From 27f8370a59877c87a0f7e232933c97e28d070557 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Tue, 28 Nov 2017 09:05:40 +0100 Subject: [PATCH 09/23] Let's use an integration test to experiment with topic setup --- build.gradle | 15 +++++++++++- .../IntegrationTestConfigLocalhost.java | 23 +++++++++++++++++++ .../TopicDeclarationsTopicCheckTest.java | 12 +++------- src/itest/resources/simplelogger.properties | 2 ++ .../service/AdminClientPropsProvider.java | 17 ++++---------- src/test/resources/simplelogger.properties | 2 ++ 6 files changed, 48 insertions(+), 23 deletions(-) create mode 100644 src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java create mode 100644 src/itest/resources/simplelogger.properties create mode 100644 src/test/resources/simplelogger.properties diff --git a/build.gradle b/build.gradle index ca2e68d..17494af 100644 --- a/build.gradle +++ b/build.gradle @@ -1,3 +1,4 @@ + repositories { mavenCentral() jcenter() @@ -14,10 +15,15 @@ apply plugin: "jacoco" group 'se.yolean' +sourceCompatibility = 1.8 + apply plugin: 'application' mainClassName = 'se.yolean.kafka.topic.client.cli.Client' -sourceCompatibility = 1.8 +configurations { + compile.exclude group: 'org.slf4j', module: 'slf4j-log4j12' + compile.exclude group: 'log4j', module: 'log4j' +} dependencies { compile group: 'javax.inject', name: 'javax.inject', version: '1' @@ -31,6 +37,7 @@ dependencies { compile group: 'io.confluent', name: 'kafka-schema-registry-client', version: '4.0.0' runtime group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.25' + runtime group: 'org.slf4j', name: 'log4j-over-slf4j', version: '1.7.25' compile group: 'structlog4j', name: 'structlog4j-api', 
version: '1.0.0' compile group: 'structlog4j', name: 'structlog4j-json', version: '1.0.0' @@ -55,6 +62,12 @@ buildscript { } dependencies { classpath 'org.standardout:gradle-eclipseconfig:1.1.0' + classpath 'org.unbroken-dome.gradle-plugins:gradle-testsets-plugin:1.4.2' } } apply plugin: 'org.standardout.eclipseconfig' + +apply plugin: 'org.unbroken-dome.test-sets' +testSets { + itest +} diff --git a/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java b/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java new file mode 100644 index 0000000..84d8894 --- /dev/null +++ b/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java @@ -0,0 +1,23 @@ +package se.yolean.kafka.topic.client.service; + +import java.util.Properties; + +import com.google.inject.AbstractModule; +import com.google.inject.name.Names; + +public class IntegrationTestConfigLocalhost extends AbstractModule { + + @Override + protected void configure() { + bind(String.class).annotatedWith(Names.named("config:bootstrap")).toInstance("localhost:9092"); + + bind(String.class).annotatedWith(Names.named("config:adminTopic")).toInstance("_topic_declarations"); + + bind(Integer.class).annotatedWith(Names.named("config:adminInitTimeoutMs")).toInstance(1000); + + bind(Integer.class).annotatedWith(Names.named("config:adminTopicDesiredReplicationFactor")).toInstance(1); + + bind(Properties.class).annotatedWith(Names.named("admin")).toProvider(AdminClientPropsProvider.class); + } + +} diff --git a/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java b/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java index e0a16da..eb6a8fb 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java +++ b/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java @@ -33,15 +33,9 @@ public void tearDown() throws Exception { @Test public void test() throws Exception { - Injector injector = Guice.createInjector(new AbstractModule() { - @Override - protected void configure() { - bind(Properties.class).toInstance(new Properties()); - //bind(TopicDeclarationsTopicCheck.class); - } - }); - //TopicDeclarationsTopicCheck check = injector.getInstance(TopicDeclarationsTopicCheck.class); - //check.createOrVerifySchemaTopic(); + Injector injector = Guice.createInjector(new IntegrationTestConfigLocalhost()); + TopicDeclarationsTopicCheck check = injector.getInstance(TopicDeclarationsTopicCheck.class); + check.createOrVerifySchemaTopic(); } } diff --git a/src/itest/resources/simplelogger.properties b/src/itest/resources/simplelogger.properties new file mode 100644 index 0000000..5534212 --- /dev/null +++ b/src/itest/resources/simplelogger.properties @@ -0,0 +1,2 @@ +org.slf4j.simpleLogger.defaultLogLevel=info +org.slf4j.simpleLogger.log.se.yolean=debug \ No newline at end of file diff --git a/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java b/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java index a2d7cc2..e89b037 100644 --- a/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java +++ b/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java @@ -6,30 +6,21 @@ import javax.inject.Named; import javax.inject.Provider; +import org.apache.kafka.clients.admin.AdminClientConfig; + public class AdminClientPropsProvider 
implements Provider { private String bootstrap; - private String acks; @Inject - public AdminClientPropsProvider(@Named("config:bootstrap") String bootstrap, - @Named("config:acks") String acks) { + public AdminClientPropsProvider(@Named("config:bootstrap") String bootstrap) { this.bootstrap = bootstrap; - this.acks = acks; } @Override public Properties get() { - // https://kafka.apache.org/0110/javadoc/index.html?org/apache/kafka/clients/producer/KafkaProducer.html Properties props = new Properties(); - props.put("bootstrap.servers", bootstrap); - props.put("acks", acks); - props.put("retries", 0); - props.put("batch.size", 16384); - props.put("linger.ms", 1); - props.put("buffer.memory", 33554432); - props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrap); return props; } diff --git a/src/test/resources/simplelogger.properties b/src/test/resources/simplelogger.properties new file mode 100644 index 0000000..5534212 --- /dev/null +++ b/src/test/resources/simplelogger.properties @@ -0,0 +1,2 @@ +org.slf4j.simpleLogger.defaultLogLevel=info +org.slf4j.simpleLogger.log.se.yolean=debug \ No newline at end of file From 362b6c55e537b1773869a6948d7b4c689600ef61 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Tue, 28 Nov 2017 09:06:04 +0100 Subject: [PATCH 10/23] Apache 2.0 license --- LICENSE.txt | 177 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 177 insertions(+) create mode 100644 LICENSE.txt diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..f433b1a --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS From eb2920ddac7dbae76c823051e0d8e987e6a24e71 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Tue, 28 Nov 2017 09:23:34 +0100 Subject: [PATCH 11/23] I'm using docker-compose locally, with an extra advertised.listeners Something like https://github.com/Yolean/kubernetes-kafka/pull/78 --- .../topic/client/service/IntegrationTestConfigLocalhost.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java b/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java index 84d8894..896feca 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java +++ b/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java @@ -7,9 +7,11 @@ public class IntegrationTestConfigLocalhost extends AbstractModule { + public static final int KAFKA_LISTENER_PORT = 9094; + @Override protected void configure() { - bind(String.class).annotatedWith(Names.named("config:bootstrap")).toInstance("localhost:9092"); + bind(String.class).annotatedWith(Names.named("config:bootstrap")).toInstance("localhost:" + KAFKA_LISTENER_PORT); bind(String.class).annotatedWith(Names.named("config:adminTopic")).toInstance("_topic_declarations"); From 16b2d30e8a4ef252537431802945a24868d18399 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Tue, 28 Nov 2017 10:05:00 +0100 Subject: [PATCH 12/23] Lists current schemas --- .../IntegrationTestConfigLocalhost.java | 3 + .../TopicDeclarationsTopicCheckTest.java | 2 - .../topic/mgmt/AdminSchemaUpdateTest.java | 55 +++++++++++++++++++ src/itest/resources/simplelogger.properties | 3 +- .../client/service/SchemaRegistryClient.java | 5 -- .../service/TopicDeclarationsTopicCheck.java | 16 +++--- .../kafka/topic/mgmt/AdminSchemaUpdate.java | 35 ++++++++++++ .../topic/mgmt/DeclarationLogConsumer.java | 9 +++ .../mgmt/SchemaRegistryClientProvider.java | 24 ++++++++ 9 files changed, 136 insertions(+), 16 deletions(-) create mode 100644 src/itest/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdateTest.java delete mode 100644 
src/main/java/se/yolean/kafka/topic/client/service/SchemaRegistryClient.java create mode 100644 src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java create mode 100644 src/main/java/se/yolean/kafka/topic/mgmt/DeclarationLogConsumer.java create mode 100644 src/main/java/se/yolean/kafka/topic/mgmt/SchemaRegistryClientProvider.java diff --git a/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java b/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java index 896feca..433da65 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java +++ b/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java @@ -20,6 +20,9 @@ protected void configure() { bind(Integer.class).annotatedWith(Names.named("config:adminTopicDesiredReplicationFactor")).toInstance(1); bind(Properties.class).annotatedWith(Names.named("admin")).toProvider(AdminClientPropsProvider.class); + + bind(String.class).annotatedWith(Names.named("config:schemaRegistryUrl")).toInstance("http://localhost:8081"); + } } diff --git a/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java b/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java index eb6a8fb..80a7271 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java +++ b/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java @@ -8,10 +8,8 @@ import org.junit.BeforeClass; import org.junit.Test; -import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Injector; -import com.sun.javafx.scene.control.Properties; public class TopicDeclarationsTopicCheckTest { diff --git a/src/itest/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdateTest.java b/src/itest/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdateTest.java new file mode 100644 index 0000000..0693995 --- /dev/null +++ b/src/itest/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdateTest.java @@ -0,0 +1,55 @@ +package se.yolean.kafka.topic.mgmt; + +import static org.junit.Assert.*; + +import java.io.IOException; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.google.inject.AbstractModule; +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.google.inject.name.Names; + +import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; +import se.yolean.kafka.topic.client.service.IntegrationTestConfigLocalhost; + +public class AdminSchemaUpdateTest { + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + } + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void test() throws Exception { + Injector injector = Guice.createInjector( + new IntegrationTestConfigLocalhost(), + new AbstractModule() { + @Override + protected void configure() { + bind(SchemaRegistryClient.class).toProvider(SchemaRegistryClientProvider.class); + } + }); + AdminSchemaUpdate update = injector.getInstance(AdminSchemaUpdate.class); + update.getCurrentSchemaVersion(); + } 
+ +} diff --git a/src/itest/resources/simplelogger.properties b/src/itest/resources/simplelogger.properties index 5534212..ff165ea 100644 --- a/src/itest/resources/simplelogger.properties +++ b/src/itest/resources/simplelogger.properties @@ -1,2 +1,3 @@ org.slf4j.simpleLogger.defaultLogLevel=info -org.slf4j.simpleLogger.log.se.yolean=debug \ No newline at end of file +org.slf4j.simpleLogger.log.se.yolean=debug +org.slf4j.simpleLogger.log.org.apache.kafka.clients.Metadata=debug diff --git a/src/main/java/se/yolean/kafka/topic/client/service/SchemaRegistryClient.java b/src/main/java/se/yolean/kafka/topic/client/service/SchemaRegistryClient.java deleted file mode 100644 index 30d567a..0000000 --- a/src/main/java/se/yolean/kafka/topic/client/service/SchemaRegistryClient.java +++ /dev/null @@ -1,5 +0,0 @@ -package se.yolean.kafka.topic.client.service; - -public class SchemaRegistryClient { - -} diff --git a/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java b/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java index 6c60d7b..b62fa81 100644 --- a/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java +++ b/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java @@ -53,12 +53,12 @@ void createOrVerifySchemaTopic() throws StoreInitializationException { } } catch (TimeoutException e) { throw new StoreInitializationException( - "Timed out trying to create or validate schema topic configuration", + "Timed out trying to create or validate topic declarations topic configuration", e ); } catch (InterruptedException | ExecutionException e) { throw new StoreInitializationException( - "Failed trying to create or validate schema topic configuration", + "Failed trying to create or validate topic declarations topic configuration", e ); } @@ -78,7 +78,7 @@ private void createSchemaTopic(AdminClient admin) throws StoreInitializationExce int schemaTopicReplicationFactor = Math.min(numLiveBrokers, desiredReplicationFactor); if (schemaTopicReplicationFactor < desiredReplicationFactor) { - log.warn("Creating the schema topic " + log.warn("Creating the topic declarations topic " + topic + " using a replication factor of " + schemaTopicReplicationFactor @@ -99,7 +99,7 @@ private void createSchemaTopic(AdminClient admin) throws StoreInitializationExce .get(initTimeout, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { if (e.getCause() instanceof TopicExistsException) { - // This is ok. + log.warn("Topic {} exists, but was not listed. 
Concurrent operations?", topic); } else { throw e; } @@ -119,12 +119,12 @@ private void verifySchemaTopic(AdminClient admin) throws StoreInitializationExce TopicDescription description = topicDescription.get(topic); final int numPartitions = description.partitions().size(); if (numPartitions != 1) { - throw new StoreInitializationException("The schema topic " + topic + " should have only 1 " + throw new StoreInitializationException("The topic declarations topic " + topic + " should have only 1 " + "partition but has " + numPartitions); } if (description.partitions().get(0).replicas().size() < desiredReplicationFactor) { - log.warn("The replication factor of the schema topic " + log.warn("The replication factor of the topic declarations topic " + topic + " is less than the desired one of " + desiredReplicationFactor @@ -140,12 +140,12 @@ private void verifySchemaTopic(AdminClient admin) throws StoreInitializationExce Config topicConfigs = configs.get(topicResource); String retentionPolicy = topicConfigs.get(TopicConfig.CLEANUP_POLICY_CONFIG).value(); if (retentionPolicy == null || !TopicConfig.CLEANUP_POLICY_COMPACT.equals(retentionPolicy)) { - log.error("The retention policy of the schema topic " + topic + " is incorrect. " + log.error("The retention policy of the topic declarations topic " + topic + " is incorrect. " + "You must configure the topic to 'compact' cleanup policy to avoid Kafka " + "deleting your schemas after a week. " + "Refer to Kafka documentation for more details on cleanup policies"); - throw new StoreInitializationException("The retention policy of the schema topic " + topic + throw new StoreInitializationException("The retention policy of the topic declarations topic " + topic + " is incorrect. Expected cleanup.policy to be " + "'compact' but it is " + retentionPolicy); diff --git a/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java b/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java new file mode 100644 index 0000000..aefaf5b --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java @@ -0,0 +1,35 @@ +package se.yolean.kafka.topic.mgmt; + +import java.io.IOException; +import java.util.Collection; + +import javax.inject.Inject; + +import com.github.structlog4j.ILogger; +import com.github.structlog4j.SLoggerFactory; + +import io.confluent.kafka.schemaregistry.client.SchemaMetadata; +import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; + +public class AdminSchemaUpdate { + + private ILogger log = SLoggerFactory.getLogger(this.getClass()); + + private SchemaRegistryClient client; + + @Inject + public AdminSchemaUpdate(SchemaRegistryClient client) { + this.client = client; + } + + public void getCurrentSchemaVersion() throws IOException, RestClientException { + Collection allSubjects = client.getAllSubjects(); + for (String subject : allSubjects) { + SchemaMetadata metadata = client.getLatestSchemaMetadata(subject); + log.debug("Found schema", "subject", subject, "id", metadata.getId(), "version", metadata.getVersion()); + log.debug("" + metadata.getSchema()); + } + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/mgmt/DeclarationLogConsumer.java b/src/main/java/se/yolean/kafka/topic/mgmt/DeclarationLogConsumer.java new file mode 100644 index 0000000..16ea6ae --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/mgmt/DeclarationLogConsumer.java @@ -0,0 +1,9 @@ +package se.yolean.kafka.topic.mgmt; + +public 
class DeclarationLogConsumer { + + public DeclarationLogConsumer() { + // TODO Auto-generated constructor stub + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/mgmt/SchemaRegistryClientProvider.java b/src/main/java/se/yolean/kafka/topic/mgmt/SchemaRegistryClientProvider.java new file mode 100644 index 0000000..3f409d6 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/mgmt/SchemaRegistryClientProvider.java @@ -0,0 +1,24 @@ +package se.yolean.kafka.topic.mgmt; + +import javax.inject.Inject; +import javax.inject.Named; +import javax.inject.Provider; + +import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; + +public class SchemaRegistryClientProvider implements Provider { + + public static final int INITIAL_MAP_CAPACITY = 10; + + @Inject + @Named("config:schemaRegistryUrl") + private String schemaRegistryBaseUrls; + + @Override + public SchemaRegistryClient get() { + // are there other impls? + return new CachedSchemaRegistryClient(schemaRegistryBaseUrls, INITIAL_MAP_CAPACITY); + } + +} From 5dc918caceeb6f6e55f4e0e215d353a2540fcfe3 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Tue, 28 Nov 2017 10:34:49 +0100 Subject: [PATCH 13/23] We'll want to get topic declaration messages as their java representation --- README.md | 7 +++++++ build.gradle | 5 +++++ .../se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java | 8 ++++++++ 3 files changed, 20 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..5bf13c4 --- /dev/null +++ b/README.md @@ -0,0 +1,7 @@ + + +## The `Topic` schema + +See `src/main/avro/Topic.avsc`. + +IDEs can get generated Java using `gradle build`. diff --git a/build.gradle b/build.gradle index 17494af..4644a5b 100644 --- a/build.gradle +++ b/build.gradle @@ -63,6 +63,7 @@ buildscript { dependencies { classpath 'org.standardout:gradle-eclipseconfig:1.1.0' classpath 'org.unbroken-dome.gradle-plugins:gradle-testsets-plugin:1.4.2' + classpath 'com.commercehub.gradle.plugin:gradle-avro-plugin:0.12.0' } } apply plugin: 'org.standardout.eclipseconfig' @@ -71,3 +72,7 @@ apply plugin: 'org.unbroken-dome.test-sets' testSets { itest } + +apply plugin: 'com.commercehub.gradle.plugin.avro' +avro { +} diff --git a/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java b/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java index aefaf5b..5df1d41 100644 --- a/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java +++ b/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java @@ -5,12 +5,15 @@ import javax.inject.Inject; +import org.apache.avro.Schema; + import com.github.structlog4j.ILogger; import com.github.structlog4j.SLoggerFactory; import io.confluent.kafka.schemaregistry.client.SchemaMetadata; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; +import se.yolean.kafka.topic.declaration.Topic; public class AdminSchemaUpdate { @@ -18,6 +21,10 @@ public class AdminSchemaUpdate { private SchemaRegistryClient client; + private String topicDeclarationSchemaName = "topic_declaration"; + + private Class topicDeclarationGeneratedClass = Topic.class; + @Inject public AdminSchemaUpdate(SchemaRegistryClient client) { this.client = client; @@ -29,6 +36,7 @@ public void getCurrentSchemaVersion() throws IOException, RestClientException { SchemaMetadata metadata = client.getLatestSchemaMetadata(subject); 
log.debug("Found schema", "subject", subject, "id", metadata.getId(), "version", metadata.getVersion()); log.debug("" + metadata.getSchema()); + Schema latestSchema = client.getBySubjectAndId(subject, metadata.getId()); } } From 5979eb4f2a28373580f1f452add7d355545f5b88 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Tue, 28 Nov 2017 11:17:20 +0100 Subject: [PATCH 14/23] Shows that we have the current schema source to compare with registry's version --- src/main/avro/Topic.avsc | 8 +++ .../TopicSchemaSourceClasspathProvider.java | 40 +++++++++++++++ .../topic/mgmt/TopicSchemaSourceProvider.java | 14 ++++++ ...opicSchemaSourceClasspathProviderTest.java | 49 +++++++++++++++++++ 4 files changed, 111 insertions(+) create mode 100644 src/main/avro/Topic.avsc create mode 100644 src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProvider.java create mode 100644 src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceProvider.java create mode 100644 src/test/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProviderTest.java diff --git a/src/main/avro/Topic.avsc b/src/main/avro/Topic.avsc new file mode 100644 index 0000000..fda5b1a --- /dev/null +++ b/src/main/avro/Topic.avsc @@ -0,0 +1,8 @@ +{ + "name": "Topic", + "namespace": "se.yolean.kafka.topic.declaration", + "type": "record", + "fields" : [ + {"name": "name", "type": "string" } + ] +} diff --git a/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProvider.java b/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProvider.java new file mode 100644 index 0000000..88992f5 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProvider.java @@ -0,0 +1,40 @@ +package se.yolean.kafka.topic.mgmt; + +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.nio.charset.Charset; + +import javax.inject.Provider; + +@Deprecated // not sure we'll have the schema source in classpath +public class TopicSchemaSourceClasspathProvider implements Provider { + + @Override + public String get() { + String path = "Topic.avsc"; + InputStream source = ClassLoader.getSystemResourceAsStream(path); + if (source == null) { + throw new RuntimeException("Failed to read the distribution's Topic schema from " + path); + } + return slurp(source, 1); + } + + public static String slurp(final InputStream is, final int bufferSize) { + final char[] buffer = new char[bufferSize]; + final StringBuilder out = new StringBuilder(); + try (Reader in = new InputStreamReader(is, Charset.forName("UTF-8"))) { + for (;;) { + int rsz = in.read(buffer, 0, buffer.length); + if (rsz < 0) + break; + out.append(buffer, 0, rsz); + } + } catch (IOException ex) { + throw new RuntimeException(ex); + } + return out.toString(); + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceProvider.java b/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceProvider.java new file mode 100644 index 0000000..b2d7aa9 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceProvider.java @@ -0,0 +1,14 @@ +package se.yolean.kafka.topic.mgmt; + +import javax.inject.Provider; + +import se.yolean.kafka.topic.declaration.Topic; + +public class TopicSchemaSourceProvider implements Provider { + + @Override + public String get() { + return Topic.SCHEMA$.toString(); + } + +} diff --git a/src/test/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProviderTest.java 
b/src/test/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProviderTest.java new file mode 100644 index 0000000..7dac1e8 --- /dev/null +++ b/src/test/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProviderTest.java @@ -0,0 +1,49 @@ +package se.yolean.kafka.topic.mgmt; + +import static org.junit.Assert.*; + +import javax.inject.Provider; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; + +public class TopicSchemaSourceClasspathProviderTest { + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + } + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + @Ignore // TODO I'm undecided on how to use the source schema in dev and production + public void testClasspath() { + Provider provider = new TopicSchemaSourceClasspathProvider(); + String schemaSourceForBuild = provider.get(); + assertNotNull(schemaSourceForBuild); + } + + @Test + public void testFromGenerated() { + Provider provider = new TopicSchemaSourceProvider(); + String schemaSourceForBuild = provider.get(); + assertNotNull(schemaSourceForBuild); + assertNotEquals(0, schemaSourceForBuild.length()); + System.out.println(schemaSourceForBuild); + } + +} From 8c6e5b4cfbbba412d68653a2486422e8624a904c Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Tue, 28 Nov 2017 16:07:36 +0100 Subject: [PATCH 15/23] Uploads our local schema, if it's missing --- .../IntegrationTestConfigLocalhost.java | 5 ++ .../TopicDeclarationsTopicCheckTest.java | 2 +- .../topic/mgmt/AdminSchemaUpdateTest.java | 26 ++++++---- .../service/TopicDeclarationsTopicCheck.java | 2 +- .../kafka/topic/mgmt/AdminSchemaUpdate.java | 46 +++++++++++++++-- .../TopicSchemaSourceClasspathProvider.java | 40 --------------- .../topic/mgmt/TopicSchemaSourceProvider.java | 8 +-- ...opicSchemaSourceClasspathProviderTest.java | 49 ------------------- 8 files changed, 70 insertions(+), 108 deletions(-) delete mode 100644 src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProvider.java delete mode 100644 src/test/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProviderTest.java diff --git a/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java b/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java index 433da65..18a816d 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java +++ b/src/itest/java/se/yolean/kafka/topic/client/service/IntegrationTestConfigLocalhost.java @@ -2,9 +2,13 @@ import java.util.Properties; +import org.apache.avro.Schema; + import com.google.inject.AbstractModule; import com.google.inject.name.Names; +import se.yolean.kafka.topic.mgmt.TopicSchemaSourceProvider; + public class IntegrationTestConfigLocalhost extends AbstractModule { public static final int KAFKA_LISTENER_PORT = 9094; @@ -23,6 +27,7 @@ protected void configure() { bind(String.class).annotatedWith(Names.named("config:schemaRegistryUrl")).toInstance("http://localhost:8081"); + bind(Schema.class).toProvider(TopicSchemaSourceProvider.class); } } diff --git a/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java b/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java index 80a7271..6dc56cc 100644 
--- a/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java +++ b/src/itest/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheckTest.java @@ -33,7 +33,7 @@ public void tearDown() throws Exception { public void test() throws Exception { Injector injector = Guice.createInjector(new IntegrationTestConfigLocalhost()); TopicDeclarationsTopicCheck check = injector.getInstance(TopicDeclarationsTopicCheck.class); - check.createOrVerifySchemaTopic(); + check.createOrVerifyAdminTopic(); } } diff --git a/src/itest/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdateTest.java b/src/itest/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdateTest.java index 0693995..04527b3 100644 --- a/src/itest/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdateTest.java +++ b/src/itest/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdateTest.java @@ -22,8 +22,18 @@ public class AdminSchemaUpdateTest { + private static Injector injector; + @BeforeClass public static void setUpBeforeClass() throws Exception { + injector = Guice.createInjector( + new IntegrationTestConfigLocalhost(), + new AbstractModule() { + @Override + protected void configure() { + bind(SchemaRegistryClient.class).toProvider(SchemaRegistryClientProvider.class); + } + }); } @AfterClass @@ -40,16 +50,14 @@ public void tearDown() throws Exception { @Test public void test() throws Exception { - Injector injector = Guice.createInjector( - new IntegrationTestConfigLocalhost(), - new AbstractModule() { - @Override - protected void configure() { - bind(SchemaRegistryClient.class).toProvider(SchemaRegistryClientProvider.class); - } - }); AdminSchemaUpdate update = injector.getInstance(AdminSchemaUpdate.class); - update.getCurrentSchemaVersion(); + update.getCurrentSchema(); + } + + @Test + public void uploadSchemaTest() throws Exception { + AdminSchemaUpdate update = injector.getInstance(AdminSchemaUpdate.class); + update.uploadCurrentSchema(); } } diff --git a/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java b/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java index b62fa81..85a4c1b 100644 --- a/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java +++ b/src/main/java/se/yolean/kafka/topic/client/service/TopicDeclarationsTopicCheck.java @@ -41,7 +41,7 @@ public class TopicDeclarationsTopicCheck { @Named("config:adminTopicDesiredReplicationFactor") private int desiredReplicationFactor; - void createOrVerifySchemaTopic() throws StoreInitializationException { + void createOrVerifyAdminTopic() throws StoreInitializationException { try (AdminClient admin = AdminClient.create(props)) { // diff --git a/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java b/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java index 5df1d41..b431f0c 100644 --- a/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java +++ b/src/main/java/se/yolean/kafka/topic/mgmt/AdminSchemaUpdate.java @@ -6,6 +6,7 @@ import javax.inject.Inject; import org.apache.avro.Schema; +import org.slf4j.Logger; import com.github.structlog4j.ILogger; import com.github.structlog4j.SLoggerFactory; @@ -13,7 +14,6 @@ import io.confluent.kafka.schemaregistry.client.SchemaMetadata; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; -import se.yolean.kafka.topic.declaration.Topic; public class AdminSchemaUpdate { @@ -21,23 +21,59 @@ public class 
AdminSchemaUpdate { private SchemaRegistryClient client; + // TODO configurable private String topicDeclarationSchemaName = "topic_declaration"; - private Class topicDeclarationGeneratedClass = Topic.class; + private Schema topicSchema; @Inject - public AdminSchemaUpdate(SchemaRegistryClient client) { + public AdminSchemaUpdate(SchemaRegistryClient client, Schema topicSchema) { this.client = client; + this.topicSchema = topicSchema; } - public void getCurrentSchemaVersion() throws IOException, RestClientException { + public void createOrVerifyAdminSchema() { + SchemaMetadata existing; + try { + existing = getCurrentSchema(); + } catch (IOException e) { + throw new RuntimeException(e); + } catch (RestClientException e) { + throw new RuntimeException(e); + } + if (existing != null) { + log.info("Stored schema is up-to-date", "id", existing.getId(), "version", existing.getVersion()); + return; + } + try { + uploadCurrentSchema(); + } catch (IOException e) { + throw new RuntimeException("Schema upload error", e); + } catch (RestClientException e) { + throw new RuntimeException("Schema upload error", e); + } + } + + public SchemaMetadata getCurrentSchema() throws IOException, RestClientException { Collection allSubjects = client.getAllSubjects(); for (String subject : allSubjects) { SchemaMetadata metadata = client.getLatestSchemaMetadata(subject); log.debug("Found schema", "subject", subject, "id", metadata.getId(), "version", metadata.getVersion()); - log.debug("" + metadata.getSchema()); Schema latestSchema = client.getBySubjectAndId(subject, metadata.getId()); + if (topicSchema.equals(latestSchema)) { + log.info("This is the topic schema!", "subject", subject, "id", metadata.getId(), "version", metadata.getVersion(), "fields", latestSchema.getFields().size()); + return metadata; + } else { + log.info("Not the topic schema", "subject", subject, "id", metadata.getId()); + } } + return null; + } + + public void uploadCurrentSchema() throws IOException, RestClientException { + log.info("Uploading current schema to registry", "subject", topicDeclarationSchemaName, "json", topicSchema.toString()); + int register = client.register(topicDeclarationSchemaName, topicSchema); + log.info("Uploaded schema", "id", register); } } diff --git a/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProvider.java b/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProvider.java deleted file mode 100644 index 88992f5..0000000 --- a/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProvider.java +++ /dev/null @@ -1,40 +0,0 @@ -package se.yolean.kafka.topic.mgmt; - -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.Reader; -import java.nio.charset.Charset; - -import javax.inject.Provider; - -@Deprecated // not sure we'll have the schema source in classpath -public class TopicSchemaSourceClasspathProvider implements Provider { - - @Override - public String get() { - String path = "Topic.avsc"; - InputStream source = ClassLoader.getSystemResourceAsStream(path); - if (source == null) { - throw new RuntimeException("Failed to read the distribution's Topic schema from " + path); - } - return slurp(source, 1); - } - - public static String slurp(final InputStream is, final int bufferSize) { - final char[] buffer = new char[bufferSize]; - final StringBuilder out = new StringBuilder(); - try (Reader in = new InputStreamReader(is, Charset.forName("UTF-8"))) { - for (;;) { - int rsz = in.read(buffer, 0, buffer.length); 
- if (rsz < 0) - break; - out.append(buffer, 0, rsz); - } - } catch (IOException ex) { - throw new RuntimeException(ex); - } - return out.toString(); - } - -} diff --git a/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceProvider.java b/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceProvider.java index b2d7aa9..387069c 100644 --- a/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceProvider.java +++ b/src/main/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceProvider.java @@ -2,13 +2,15 @@ import javax.inject.Provider; +import org.apache.avro.Schema; + import se.yolean.kafka.topic.declaration.Topic; -public class TopicSchemaSourceProvider implements Provider { +public class TopicSchemaSourceProvider implements Provider { @Override - public String get() { - return Topic.SCHEMA$.toString(); + public Schema get() { + return Topic.SCHEMA$; } } diff --git a/src/test/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProviderTest.java b/src/test/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProviderTest.java deleted file mode 100644 index 7dac1e8..0000000 --- a/src/test/java/se/yolean/kafka/topic/mgmt/TopicSchemaSourceClasspathProviderTest.java +++ /dev/null @@ -1,49 +0,0 @@ -package se.yolean.kafka.topic.mgmt; - -import static org.junit.Assert.*; - -import javax.inject.Provider; - -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Test; - -public class TopicSchemaSourceClasspathProviderTest { - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - } - - @AfterClass - public static void tearDownAfterClass() throws Exception { - } - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - @Ignore // TODO I'm undecided on how to use the source schema in dev and production - public void testClasspath() { - Provider provider = new TopicSchemaSourceClasspathProvider(); - String schemaSourceForBuild = provider.get(); - assertNotNull(schemaSourceForBuild); - } - - @Test - public void testFromGenerated() { - Provider provider = new TopicSchemaSourceProvider(); - String schemaSourceForBuild = provider.get(); - assertNotNull(schemaSourceForBuild); - assertNotEquals(0, schemaSourceForBuild.length()); - System.out.println(schemaSourceForBuild); - } - -} From a1a000d46a41e24ca0ac4ce6b355ae762eb39482 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Thu, 30 Nov 2017 20:01:46 +0100 Subject: [PATCH 16/23] Evaluating concurrency and retry alternatives --- .../ClusterDescriptionProviderTest.java | 34 +++++++ .../yolean/kafka/topic/client/cli/Client.java | 51 +++++++++++ .../client/cli/ManagedTopicsService.java | 55 ++++++++++++ .../config/AdminConsumerPropsProvider.java | 39 ++++++++ .../client/config/CallExecutorProvider.java | 19 ++++ .../config/ClusterDescriptionProvider.java | 37 ++++++++ .../client/config/DocumentedProperty.java | 74 +++++++++++++++ .../config/ExecutorRetryProviderForInit.java | 26 ++++++ .../config/ExecutorServiceProvider.java | 28 ++++++ .../client/config/ManagerConfigModule.java | 41 +++++++++ .../topic/client/config/MetricsModule.java | 66 ++++++++++++++ .../executor/RetryingExecutorService.java | 7 ++ .../service/AdminClientPropsProvider.java | 1 + .../client/service/AdminClientProvider.java | 34 +++++++ .../kafka/topic/client/tasks/BrokerProbe.java | 90 +++++++++++++++++++ .../topic/client/tasks/BrokerStatus.java | 5 ++ 
.../topic/client/tasks/RestProxySetup.java | 15 ++++ .../client/tasks/SchemaRegistrySetup.java | 15 ++++ .../yolean/kafka/topic/client/tasks/Task.java | 15 ++++ .../client/tasks/TaskRetryBackoffWrapper.java | 29 ++++++ .../kafka/topic/mgmt/TopicConsumerLoop.java | 10 +++ src/main/resources/default.properties | 1 + .../client/config/DocumentedPropertyTest.java | 22 +++++ .../config/ManagerConfigModuleTest.java | 34 +++++++ 24 files changed, 748 insertions(+) create mode 100644 src/itest/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProviderTest.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/config/AdminConsumerPropsProvider.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/config/CallExecutorProvider.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProvider.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/config/DocumentedProperty.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/config/ExecutorServiceProvider.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/config/MetricsModule.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/executor/RetryingExecutorService.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/service/AdminClientProvider.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/tasks/BrokerProbe.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/tasks/BrokerStatus.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/tasks/RestProxySetup.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/tasks/SchemaRegistrySetup.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/tasks/Task.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/tasks/TaskRetryBackoffWrapper.java create mode 100644 src/main/java/se/yolean/kafka/topic/mgmt/TopicConsumerLoop.java create mode 100644 src/main/resources/default.properties create mode 100644 src/test/java/se/yolean/kafka/topic/client/config/DocumentedPropertyTest.java create mode 100644 src/test/java/se/yolean/kafka/topic/client/config/ManagerConfigModuleTest.java diff --git a/src/itest/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProviderTest.java b/src/itest/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProviderTest.java new file mode 100644 index 0000000..b8a4950 --- /dev/null +++ b/src/itest/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProviderTest.java @@ -0,0 +1,34 @@ +package se.yolean.kafka.topic.client.config; + +import static org.junit.Assert.*; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class ClusterDescriptionProviderTest { + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + } + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGet() { + + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/cli/Client.java 
b/src/main/java/se/yolean/kafka/topic/client/cli/Client.java index 18b8215..39ec71b 100644 --- a/src/main/java/se/yolean/kafka/topic/client/cli/Client.java +++ b/src/main/java/se/yolean/kafka/topic/client/cli/Client.java @@ -3,6 +3,11 @@ import org.I0Itec.zkclient.ZkClient; import org.I0Itec.zkclient.ZkConnection; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStream; import java.util.Properties; import java.util.concurrent.TimeUnit; @@ -14,6 +19,8 @@ public class Client { + final static String DEFAULT_PROPERTIES_FILE = "defaults.properties"; + final static String topicName = System.getenv("TOPIC_NAME"); final static boolean resetTopic = Boolean.parseBoolean(System.getenv("RESET_TOPIC")); final static int partitions = Integer.parseInt(System.getenv("NUM_PARTITIONS")); @@ -24,7 +31,51 @@ public class Client { final static String zookeeperConnect = System.getenv("ZOOKEEPER_CONNECT"); + static ClassLoader getClassLoaderForDefaults() { + return Client.class.getClassLoader(); + } + + static ManagedTopicsService managerStart(String managerPropertiesPath) { + Properties properties = new Properties(); + InputStream defaultProperties = getClassLoaderForDefaults().getResourceAsStream(DEFAULT_PROPERTIES_FILE); + if (defaultProperties == null) { + throw new RuntimeException("Failed to load default properties " + DEFAULT_PROPERTIES_FILE); + } + try { + properties.load(defaultProperties); + } catch (IOException e) { + throw new RuntimeException("Failed to load default properties from " + DEFAULT_PROPERTIES_FILE, e); + } + + File managerProperties = new File(managerPropertiesPath); + if (!managerProperties.exists()) { + throw new RuntimeException("Failed to find properties file " + managerPropertiesPath); + } + if (!managerProperties.canRead()) { + throw new RuntimeException("Unreadable properties file " + managerPropertiesPath); + } + FileReader managerPropertiesReader; + try { + managerPropertiesReader = new FileReader(managerProperties); + } catch (FileNotFoundException e) { + throw new RuntimeException("Reader failed to find properties file " + managerPropertiesPath, e); + } + try { + properties.load(managerPropertiesReader); + } catch (IOException e) { + throw new RuntimeException("Failed to read properties file " + managerPropertiesPath, e); + } + + return new ManagedTopicsService(properties); + } + public static void main(String[] args) throws Exception { + if (args.length > 0) { + String managerPropertiesPath = args[0]; + managerStart(managerPropertiesPath); + return; + } + if (topicName.length() < 1) throw new Exception("Missing environment variable 'TOPIC_NAME'!"); if (zookeeperConnect.length() < 1) throw new Exception("Missing environment variable 'ZOOKEEKER_CONNECT'"); diff --git a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java new file mode 100644 index 0000000..2529dd0 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java @@ -0,0 +1,55 @@ +package se.yolean.kafka.topic.client.cli; + +import java.io.PrintWriter; +import java.io.StringWriter; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Properties; +import java.util.concurrent.ExecutorService; + +import com.fasterxml.jackson.databind.util.Annotations; +import com.github.structlog4j.ILogger; +import com.github.structlog4j.SLoggerFactory; +import com.google.inject.Guice; 
+import com.google.inject.Injector;
+import com.nurkiewicz.asyncretry.RetryExecutor;
+
+import se.yolean.kafka.topic.client.config.ManagerConfigModule;
+import se.yolean.kafka.topic.client.config.MetricsModule;
+import se.yolean.kafka.topic.client.config.ExecutorRetryProviderForInit;
+import se.yolean.kafka.topic.client.tasks.RestProxySetup;
+import se.yolean.kafka.topic.client.tasks.SchemaRegistrySetup;
+
+public class ManagedTopicsService {
+
+  public final ILogger log = SLoggerFactory.getLogger(this.getClass());
+
+  public ManagedTopicsService(Properties config) {
+    log.info("Starting Topic Manager Service", "hostname", getHostname());
+
+    Injector initContext = Guice.createInjector(
+        new ManagerConfigModule(config),
+        new MetricsModule()
+        );
+
+    MetricsModule.Exporter exporter = initContext.getInstance(MetricsModule.Exporter.class);
+    log.info("Metrics exporter", "status", exporter.getStatus(), "port", exporter.getHttpPort());
+
+    SchemaRegistrySetup shemaRegistry = initContext.getInstance(SchemaRegistrySetup.class);
+
+    RestProxySetup restProxy = initContext.getInstance(RestProxySetup.class);
+
+    // ManagerConfigModule binds RetryExecutor to ExecutorRetryProviderForInit
+    RetryExecutor ex = initContext.getInstance(RetryExecutor.class);
+    ex.getWithRetry(() -> new String());
+  }
+
+  String getHostname() {
+    try {
+      return InetAddress.getLocalHost().getHostName();
+    } catch (UnknownHostException e) {
+      throw new RuntimeException("Failed to get hostname", e);
+    }
+  }
+
+}
diff --git a/src/main/java/se/yolean/kafka/topic/client/config/AdminConsumerPropsProvider.java b/src/main/java/se/yolean/kafka/topic/client/config/AdminConsumerPropsProvider.java
new file mode 100644
index 0000000..54d4c32
--- /dev/null
+++ b/src/main/java/se/yolean/kafka/topic/client/config/AdminConsumerPropsProvider.java
@@ -0,0 +1,39 @@
+package se.yolean.kafka.topic.client.config;
+
+import java.util.Properties;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.inject.Provider;
+
+/**
+ * Consume the admin topic.
+ */
+public class AdminConsumerPropsProvider implements Provider<Properties> {
+
+  /**
+   * Same ID in all replicas, means they act as a consumer group.
+ * + * Value = {@value} + */ + public static final String CONSUMER_GROUP_ID = "kafka-topic-client"; + + private String bootstrap; + + @Inject + public AdminConsumerPropsProvider(@Named("config:bootstrap") String bootstrap) { + this.bootstrap = bootstrap; + } + + @Override + public Properties get() { + Properties props = new Properties(); + props.put("bootstrap.servers", bootstrap); + props.put("group.id", CONSUMER_GROUP_ID); + props.put("enable.auto.commit", "false"); + props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); + props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); + return props; + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/CallExecutorProvider.java b/src/main/java/se/yolean/kafka/topic/client/config/CallExecutorProvider.java new file mode 100644 index 0000000..da8f118 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/config/CallExecutorProvider.java @@ -0,0 +1,19 @@ +package se.yolean.kafka.topic.client.config; + +import javax.inject.Provider; + +import com.evanlennick.retry4j.CallExecutor; + +public class CallExecutorProvider implements Provider { + + public CallExecutorProvider() { + // TODO Auto-generated constructor stub + } + + @Override + public CallExecutor get() { + // TODO Auto-generated method stub + return null; + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProvider.java b/src/main/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProvider.java new file mode 100644 index 0000000..7c65d28 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProvider.java @@ -0,0 +1,37 @@ +package se.yolean.kafka.topic.client.config; + +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import javax.inject.Inject; +import javax.inject.Named; + +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.admin.DescribeClusterResult; + +import se.yolean.kafka.topic.client.service.StoreInitializationException; + +public class ClusterDescriptionProvider { + + private String bootstrap; + + @Inject + public ClusterDescriptionProvider(@Named("config:bootstrap") String bootstrap) { + this.bootstrap = bootstrap; + } + + public void get() { + Properties props = new Properties(); + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrap); + + AdminClient admin = AdminClient.create(props); + + DescribeClusterResult describeCluster = admin.describeCluster(); + + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/DocumentedProperty.java b/src/main/java/se/yolean/kafka/topic/client/config/DocumentedProperty.java new file mode 100644 index 0000000..4cc90d0 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/config/DocumentedProperty.java @@ -0,0 +1,74 @@ +package se.yolean.kafka.topic.client.config; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class DocumentedProperty { + + private static final Map all = new LinkedHashMap<>(1); + + static { + new DocumentedProperty("bootstrap.servers", Type.Str, true) + .setDescription("What any Kafka client nees"); + } + + public static final boolean has(String key) { + return all.containsKey(key); + } + + public static final 
DocumentedProperty get(String key) { + return all.get(key); + } + + /** + * Note that any property used as @Inject will be required regardless. + */ + public static final List getRequired() { + return all.entrySet().stream() + .filter(p -> p.getValue().isRequired()) + .map(p -> p.getKey()) + .collect(Collectors.toList()); + } + + public enum Type { + Str, + Int, + Bool + } + + private String key; + private Type type; + private boolean isRequired; + private String description = null; + + private DocumentedProperty(String key, Type type, boolean isRequired) { + this.key = key; + this.type = type; + this.isRequired = isRequired; + all.put(key, this); + } + + private DocumentedProperty setDescription(String description) { + this.description = description; + return this; + } + + public String getKey() { + return key; + } + + public Type getType() { + return type; + } + + public String getDescription() { + return description; + } + + public boolean isRequired() { + return isRequired; + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java new file mode 100644 index 0000000..5710caa --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java @@ -0,0 +1,26 @@ +package se.yolean.kafka.topic.client.config; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +import javax.inject.Provider; + +import com.nurkiewicz.asyncretry.AsyncRetryExecutor; +import com.nurkiewicz.asyncretry.RetryExecutor; + +public class ExecutorRetryProviderForInit implements Provider { + + @Override + public RetryExecutor get() { + ScheduledExecutorService concurrency = Executors.newSingleThreadScheduledExecutor(); + AsyncRetryExecutor executor = new AsyncRetryExecutor(concurrency) + //.retryOn(Throwable.class) + .withExponentialBackoff(500, 2) //500ms times 2 after each retry + .withMaxDelay(10_000) //10 seconds + .withUniformJitter() //add between +/- 100 ms randomly + .withMaxRetries(20); + return executor; + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ExecutorServiceProvider.java b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorServiceProvider.java new file mode 100644 index 0000000..4741084 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorServiceProvider.java @@ -0,0 +1,28 @@ +package se.yolean.kafka.topic.client.config; + +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +import javax.inject.Provider; + +import com.github.structlog4j.ILogger; +import com.github.structlog4j.SLoggerFactory; + +public class ExecutorServiceProvider implements Provider { + + private final ILogger log = SLoggerFactory.getLogger(this.getClass()); + + private ScheduledExecutorService shared = null; + + @Override + public ScheduledExecutorService get() { + if (shared == null) { + log.info("Creating new executor"); + shared = Executors.newSingleThreadScheduledExecutor(); + } else { + log.warn("Reusing shared executor instance"); + } + return shared; + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java b/src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java new file mode 100644 index 0000000..14920e3 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java @@ -0,0 +1,41 
@@ +package se.yolean.kafka.topic.client.config; + +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.Properties; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +import com.github.structlog4j.ILogger; +import com.github.structlog4j.SLoggerFactory; +import com.google.inject.AbstractModule; +import com.google.inject.name.Names; +import com.nurkiewicz.asyncretry.RetryExecutor; + +public class ManagerConfigModule extends AbstractModule { + + private final ILogger log = SLoggerFactory.getLogger(this.getClass()); + + private Properties config; + + public ManagerConfigModule(Properties config) { + this.config = config; + logConfigValues(); + } + + void logConfigValues() { + StringWriter writer = new StringWriter(); + config.list(new PrintWriter(writer)); + log.info("Topic Manager config: " + writer.getBuffer().toString()); + } + + @Override + protected void configure() { + Names.bindProperties(super.binder(), this.config); + + bind(ScheduledExecutorService.class).toProvider(ExecutorServiceProvider.class); + bind(RetryExecutor.class).toProvider(ExecutorRetryProviderForInit.class); + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/MetricsModule.java b/src/main/java/se/yolean/kafka/topic/client/config/MetricsModule.java new file mode 100644 index 0000000..24c70fd --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/config/MetricsModule.java @@ -0,0 +1,66 @@ +package se.yolean.kafka.topic.client.config; + +import java.io.IOException; + +import javax.inject.Inject; +import javax.inject.Named; +import javax.inject.Provider; + +import com.google.inject.AbstractModule; + +import io.prometheus.client.exporter.HTTPServer; + +public class MetricsModule extends AbstractModule implements Provider { + + @Inject + @Named("prometheus.exporter.port") + private int port; + + @Override + protected void configure() { + bind(Exporter.class).toProvider(this).asEagerSingleton(); + } + + @Override + public Exporter get() { + HTTPServer server; + try { + server = new HTTPServer(port); + } catch (IOException e) { + throw new RuntimeException("Failed to start metrics exporter on port " + port, e); + } + bind(HTTPServer.class).toInstance(server); + + return new Exporter() { + + public int getHttpPort() { + return port; + } + + public void shutdown() { + server.stop(); + } + + @Override + public Status getStatus() { + return Status.running; + } + + }; + } + + public interface Exporter { + + enum Status { + running + } + + int getHttpPort(); + + void shutdown(); + + Status getStatus(); + + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/executor/RetryingExecutorService.java b/src/main/java/se/yolean/kafka/topic/client/executor/RetryingExecutorService.java new file mode 100644 index 0000000..c4115b1 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/executor/RetryingExecutorService.java @@ -0,0 +1,7 @@ +package se.yolean.kafka.topic.client.executor; + +import java.util.concurrent.ExecutorService; + +public interface RetryingExecutorService extends ExecutorService { + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java b/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java index e89b037..1a87ac0 100644 --- a/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java +++ b/src/main/java/se/yolean/kafka/topic/client/service/AdminClientPropsProvider.java @@ 
-8,6 +8,7 @@ import org.apache.kafka.clients.admin.AdminClientConfig; +@Deprecated // Inject AdminClient directly public class AdminClientPropsProvider implements Provider { private String bootstrap; diff --git a/src/main/java/se/yolean/kafka/topic/client/service/AdminClientProvider.java b/src/main/java/se/yolean/kafka/topic/client/service/AdminClientProvider.java new file mode 100644 index 0000000..00b8261 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/service/AdminClientProvider.java @@ -0,0 +1,34 @@ +package se.yolean.kafka.topic.client.service; + +import java.util.Properties; + +import javax.inject.Inject; +import javax.inject.Named; +import javax.inject.Provider; + +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; + +import com.github.structlog4j.ILogger; +import com.github.structlog4j.SLoggerFactory; + +public class AdminClientProvider implements Provider { + + private final ILogger log = SLoggerFactory.getLogger(this.getClass()); + + private String bootstrap; + + @Inject + public AdminClientProvider(@Named("bootstrap.servers") String bootstrap) { + this.bootstrap = bootstrap; + } + + @Override + public AdminClient get() { + Properties props = new Properties(); + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrap); + log.debug("Creating AdminClient", "bootstrap", bootstrap); + return AdminClient.create(props); + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerProbe.java b/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerProbe.java new file mode 100644 index 0000000..0b48762 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerProbe.java @@ -0,0 +1,90 @@ +package se.yolean.kafka.topic.client.tasks; + +import java.util.Collection; +import java.util.Collections; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import javax.inject.Inject; +import javax.inject.Provider; + +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.DescribeClusterOptions; +import org.apache.kafka.clients.admin.DescribeClusterResult; +import org.apache.kafka.common.Node; + +import com.github.structlog4j.ILogger; +import com.github.structlog4j.SLoggerFactory; + +import io.prometheus.client.Counter; + +public class BrokerProbe implements Task, Provider { + + private final ILogger log = SLoggerFactory.getLogger(this.getClass()); + + // Should be made configurable, but let's keep them short and work on back-off now + private final int describeTimeoutMs = 1; // is this used, or overridden at each call? 
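+  // describeTimeoutMs is applied through DescribeClusterOptions.timeoutMs(...) in get() below;
+  // nodesTimeoutMs bounds the blocking get() on the controller/nodes futures of the DescribeClusterResult.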
+ private final int nodesTimeoutMs = 10; + + static final Counter timeouts = Counter.build() + .name("timeouts").labelNames("broker_probe").help("AdminClient.describeCluster timeouts").register(); + + @Inject + private AdminClient adminClient; + + @Override + public Integer call() throws Exception { + // TODO Auto-generated method stub + return null; + } + + @Override + public BrokerStatus get() { + DescribeClusterOptions options = new DescribeClusterOptions(); + options.timeoutMs(describeTimeoutMs); + DescribeClusterResult describe = adminClient.describeCluster(options); + return new BrokerStatusNotCached(describe, nodesTimeoutMs); + } + + class BrokerStatusNotCached implements BrokerStatus { + + private DescribeClusterResult describe; + private int timeoutMs; + + private BrokerStatusNotCached(DescribeClusterResult describe, int timeoutMs) { + this.describe = describe; + this.timeoutMs = timeoutMs; + } + + public Node getController() { + try { + return describe.controller().get(timeoutMs, TimeUnit.MILLISECONDS); + } catch (InterruptedException e) { + log.error("Interrupted when waiting for controller status", e); + } catch (ExecutionException e) { + log.error("Execution error for controller status", e); + } catch (TimeoutException e) { + log.warn("Timeout waiting for controller response", "ms", timeoutMs, e); + timeouts.inc(); + } + return null; + } + + public Collection getNodes() { + try { + return describe.nodes().get(timeoutMs, TimeUnit.MILLISECONDS); + } catch (InterruptedException e) { + log.error("Interrupted when waiting for controller status", e); + } catch (ExecutionException e) { + log.error("Execution error for controller status", e); + } catch (TimeoutException e) { + log.warn("Timeout waiting for controller response", "ms", timeoutMs, e); + timeouts.inc(); + } + return Collections.emptySet(); + } + + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerStatus.java b/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerStatus.java new file mode 100644 index 0000000..501eca9 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerStatus.java @@ -0,0 +1,5 @@ +package se.yolean.kafka.topic.client.tasks; + +public interface BrokerStatus { + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/RestProxySetup.java b/src/main/java/se/yolean/kafka/topic/client/tasks/RestProxySetup.java new file mode 100644 index 0000000..0b9af63 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/tasks/RestProxySetup.java @@ -0,0 +1,15 @@ +package se.yolean.kafka.topic.client.tasks; + +import com.github.structlog4j.ILogger; +import com.github.structlog4j.SLoggerFactory; + +public class RestProxySetup implements Runnable { + + private final ILogger log = SLoggerFactory.getLogger(this.getClass()); + + @Override + public void run() { + log.warn("TODO set up REST endpoint for topic creation"); + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/SchemaRegistrySetup.java b/src/main/java/se/yolean/kafka/topic/client/tasks/SchemaRegistrySetup.java new file mode 100644 index 0000000..d181b17 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/tasks/SchemaRegistrySetup.java @@ -0,0 +1,15 @@ +package se.yolean.kafka.topic.client.tasks; + +import com.github.structlog4j.ILogger; +import com.github.structlog4j.SLoggerFactory; + +public class SchemaRegistrySetup implements Runnable { + + private final ILogger log = SLoggerFactory.getLogger(this.getClass()); + + @Override + public void run() { + log.warn("TODO set up REST 
endpoint for topic creation"); + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/Task.java b/src/main/java/se/yolean/kafka/topic/client/tasks/Task.java new file mode 100644 index 0000000..8dfd65a --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/tasks/Task.java @@ -0,0 +1,15 @@ +package se.yolean.kafka.topic.client.tasks; + +import java.util.concurrent.Callable; + +/** + * Anything that might need retries or benefit from concurrency + * would have to be wrapped anyway, so let's have this task abstraction. + * + * @param The type returned by the actual {@link Callable} + * + * @deprecated Use {@link Callable} or {@link Runnable} depending on use case. + */ +public interface Task extends Callable { + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/TaskRetryBackoffWrapper.java b/src/main/java/se/yolean/kafka/topic/client/tasks/TaskRetryBackoffWrapper.java new file mode 100644 index 0000000..595a9db --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/tasks/TaskRetryBackoffWrapper.java @@ -0,0 +1,29 @@ +package se.yolean.kafka.topic.client.tasks; + +import com.evanlennick.retry4j.CallExecutor; +import com.evanlennick.retry4j.CallResults; +import com.evanlennick.retry4j.config.RetryConfig; +import com.evanlennick.retry4j.config.RetryConfigBuilder; + +public class TaskRetryBackoffWrapper implements Task { + + private RetryConfig config; + private Task task; + + public TaskRetryBackoffWrapper(Task actualTask) { + this.task = actualTask; + config = new RetryConfigBuilder() + .failOnAnyException() + .withExponentialBackoff() + .withMaxNumberOfTries(10) + .build(); + } + + @Override + public T call() throws Exception { + CallExecutor executor = new CallExecutor(config); + CallResults execute = executor.execute(task); + return execute.getResult(); + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/mgmt/TopicConsumerLoop.java b/src/main/java/se/yolean/kafka/topic/mgmt/TopicConsumerLoop.java new file mode 100644 index 0000000..47a6e79 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/mgmt/TopicConsumerLoop.java @@ -0,0 +1,10 @@ +package se.yolean.kafka.topic.mgmt; + +import org.apache.kafka.clients.consumer.Consumer; + +import se.yolean.kafka.topic.declaration.Topic; + +public class TopicConsumerLoop { + + +} diff --git a/src/main/resources/default.properties b/src/main/resources/default.properties new file mode 100644 index 0000000..1472065 --- /dev/null +++ b/src/main/resources/default.properties @@ -0,0 +1 @@ +prometheus.exporter.port=5000 diff --git a/src/test/java/se/yolean/kafka/topic/client/config/DocumentedPropertyTest.java b/src/test/java/se/yolean/kafka/topic/client/config/DocumentedPropertyTest.java new file mode 100644 index 0000000..84ada36 --- /dev/null +++ b/src/test/java/se/yolean/kafka/topic/client/config/DocumentedPropertyTest.java @@ -0,0 +1,22 @@ +package se.yolean.kafka.topic.client.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + +public class DocumentedPropertyTest { + + @Test + public void testBootstrapServers() { + assertTrue(DocumentedProperty.has("bootstrap.servers")); + assertTrue("Should be listed as required property", DocumentedProperty.getRequired() + .stream().anyMatch(key -> "bootstrap.servers".equals(key))); + DocumentedProperty p = DocumentedProperty.get("bootstrap.servers"); + assertNotNull(".set get should return the property", p); + 
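+    // bootstrap.servers is registered in DocumentedProperty's static initializer as a required Str property with a description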
assertEquals(DocumentedProperty.Type.Str, p.getType()); + assertNotNull(p.getDescription()); + } + +} diff --git a/src/test/java/se/yolean/kafka/topic/client/config/ManagerConfigModuleTest.java b/src/test/java/se/yolean/kafka/topic/client/config/ManagerConfigModuleTest.java new file mode 100644 index 0000000..0ac0713 --- /dev/null +++ b/src/test/java/se/yolean/kafka/topic/client/config/ManagerConfigModuleTest.java @@ -0,0 +1,34 @@ +package se.yolean.kafka.topic.client.config; + +import static org.junit.Assert.assertEquals; + +import java.util.Properties; + +import javax.inject.Inject; +import javax.inject.Named; + +import org.junit.Test; + +import com.google.inject.Guice; +import com.google.inject.Injector; + +public class ManagerConfigModuleTest { + + @Test + public void test() { + Properties props = new Properties(); + props.setProperty("bootstrap.servers", "PLAINTEXT://my-test-value:9092"); + Injector injector = Guice.createInjector(new ManagerConfigModule(props)); + TestService1 t1 = injector.getInstance(TestService1.class); + assertEquals("PLAINTEXT://my-test-value:9092", t1.boostrapServers); + } + + static class TestService1 { + + @Inject + @Named("bootstrap.servers") + private String boostrapServers; + + } + +} From eb63aea9c0b81bc2f92a5f9afd95cd66feab83cb Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Thu, 30 Nov 2017 21:32:42 +0100 Subject: [PATCH 17/23] Testability! for anyone who has my docker-compose.yml :) --- build.gradle | 4 + .../kafka/topic/client/config/ItestProps.java | 60 +++++++++++++ .../retryable/BrokerProbeIntegrationTest.java | 27 ++++++ .../resources/itest-dockercompose.properties | 1 + .../yolean/kafka/topic/client/cli/Client.java | 2 +- .../client/cli/ManagedTopicsService.java | 6 +- .../executor/RetryingExecutorService.java | 7 -- .../topic/client/retryable/BrokerProbe.java | 81 +++++++++++++++++ .../retryable/NotEnoughBrokersException.java | 11 +++ .../{tasks => retryable}/RestProxySetup.java | 2 +- .../SchemaRegistrySetup.java | 2 +- .../client/{tasks => retryable}/Task.java | 2 +- .../kafka/topic/client/tasks/BrokerProbe.java | 90 ------------------- .../topic/client/tasks/BrokerStatus.java | 5 -- .../client/tasks/TaskRetryBackoffWrapper.java | 29 ------ src/main/resources/default.properties | 3 + 16 files changed, 195 insertions(+), 137 deletions(-) create mode 100644 src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java create mode 100644 src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java create mode 100644 src/itest/resources/itest-dockercompose.properties delete mode 100644 src/main/java/se/yolean/kafka/topic/client/executor/RetryingExecutorService.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/retryable/NotEnoughBrokersException.java rename src/main/java/se/yolean/kafka/topic/client/{tasks => retryable}/RestProxySetup.java (86%) rename src/main/java/se/yolean/kafka/topic/client/{tasks => retryable}/SchemaRegistrySetup.java (86%) rename src/main/java/se/yolean/kafka/topic/client/{tasks => retryable}/Task.java (88%) delete mode 100644 src/main/java/se/yolean/kafka/topic/client/tasks/BrokerProbe.java delete mode 100644 src/main/java/se/yolean/kafka/topic/client/tasks/BrokerStatus.java delete mode 100644 src/main/java/se/yolean/kafka/topic/client/tasks/TaskRetryBackoffWrapper.java diff --git a/build.gradle b/build.gradle index 4644a5b..0862c80 100644 --- a/build.gradle +++ b/build.gradle @@ -36,6 
+36,10 @@ dependencies { compile group: 'io.confluent', name: 'kafka-schema-registry-client', version: '4.0.0' + compile group: 'com.nurkiewicz.asyncretry', name: 'asyncretry', version: '0.0.7' + //compile group: 'com.github.rholder', name: 'guava-retrying', version: '2.0.0' + compile group: 'com.evanlennick', name: 'retry4j', version: '0.7.2' + runtime group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.25' runtime group: 'org.slf4j', name: 'log4j-over-slf4j', version: '1.7.25' compile group: 'structlog4j', name: 'structlog4j-api', version: '1.0.0' diff --git a/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java b/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java new file mode 100644 index 0000000..2eeb355 --- /dev/null +++ b/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java @@ -0,0 +1,60 @@ +package se.yolean.kafka.topic.client.config; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + +import com.google.inject.AbstractModule; +import com.google.inject.name.Names; + +import io.prometheus.client.CollectorRegistry; +import se.yolean.kafka.topic.client.cli.Client; + +public class ItestProps extends AbstractModule { + + public static final CollectorRegistry PROMETHEUS_REGISTRY = new CollectorRegistry(); + + //public static final ItestProps DOCKER_COMPOSE = new ItestProps("itest-dockercompose.properties"); + public static final ItestProps DOCKER_COMPOSE = new ItestProps(new File("src/itest/resources/itest-dockercompose.properties")); + + private Properties config; + + public ItestProps(String itestPropertiesFielnameInClasspathRoot) { + Properties properties = new Properties(); + try { + InputStream defaultProperties = Client.class.getResourceAsStream(Client.DEFAULT_PROPERTIES_FILE); + properties.load(defaultProperties); + InputStream itestProperties = this.getClass().getResourceAsStream(itestPropertiesFielnameInClasspathRoot); + properties.load(itestProperties); + } catch (IOException e) { + throw new RuntimeException(e); + } + this.config = properties; + } + + public ItestProps(File itestPropertiesFile) { + Properties properties = new Properties(); + try { + FileReader defaults = new FileReader(new File("src/main/resources/" + Client.DEFAULT_PROPERTIES_FILE)); + properties.load(defaults); + FileReader itest = new FileReader(itestPropertiesFile); + properties.load(itest); + } catch (FileNotFoundException e) { + throw new RuntimeException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + this.config = properties; + } + + @Override + protected void configure() { + System.out.print("Itest props: "); + this.config.list(System.out); + Names.bindProperties(super.binder(), this.config); + } + +} diff --git a/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java b/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java new file mode 100644 index 0000000..bd4cf38 --- /dev/null +++ b/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java @@ -0,0 +1,27 @@ +package se.yolean.kafka.topic.client.retryable; + +import org.apache.kafka.clients.admin.AdminClient; +import org.junit.Test; + +import com.google.inject.AbstractModule; +import com.google.inject.Guice; +import com.google.inject.Injector; + +import se.yolean.kafka.topic.client.config.ItestProps; +import se.yolean.kafka.topic.client.service.AdminClientProvider; 
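+/**
+ * Runs the probe against a live broker: ItestProps.DOCKER_COMPOSE supplies
+ * bootstrap.servers (PLAINTEXT://localhost:9094 from itest-dockercompose.properties)
+ * and the extra module binds a real AdminClient via AdminClientProvider, so the
+ * docker-compose environment mentioned in the commit message needs to be running.
+ */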
+ +public class BrokerProbeIntegrationTest { + + @Test + public void test() throws Exception { + Injector conf = Guice.createInjector(ItestProps.DOCKER_COMPOSE, new AbstractModule() { + @Override + protected void configure() { + bind(AdminClient.class).toProvider(AdminClientProvider.class); + } + }); + BrokerProbe probe = conf.getInstance(BrokerProbe.class); + probe.call(); + } + +} diff --git a/src/itest/resources/itest-dockercompose.properties b/src/itest/resources/itest-dockercompose.properties new file mode 100644 index 0000000..4ab8db7 --- /dev/null +++ b/src/itest/resources/itest-dockercompose.properties @@ -0,0 +1 @@ +bootstrap.servers=PLAINTEXT://localhost:9094 diff --git a/src/main/java/se/yolean/kafka/topic/client/cli/Client.java b/src/main/java/se/yolean/kafka/topic/client/cli/Client.java index 39ec71b..f5e6dba 100644 --- a/src/main/java/se/yolean/kafka/topic/client/cli/Client.java +++ b/src/main/java/se/yolean/kafka/topic/client/cli/Client.java @@ -19,7 +19,7 @@ public class Client { - final static String DEFAULT_PROPERTIES_FILE = "defaults.properties"; + public final static String DEFAULT_PROPERTIES_FILE = "default.properties"; final static String topicName = System.getenv("TOPIC_NAME"); final static boolean resetTopic = Boolean.parseBoolean(System.getenv("RESET_TOPIC")); diff --git a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java index 2529dd0..d2285fc 100644 --- a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java +++ b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java @@ -16,9 +16,9 @@ import se.yolean.kafka.topic.client.config.ManagerConfigModule; import se.yolean.kafka.topic.client.config.MetricsModule; +import se.yolean.kafka.topic.client.retryable.RestProxySetup; +import se.yolean.kafka.topic.client.retryable.SchemaRegistrySetup; import se.yolean.kafka.topic.client.config.ExecutorRetryProviderForInit; -import se.yolean.kafka.topic.client.tasks.RestProxySetup; -import se.yolean.kafka.topic.client.tasks.SchemaRegistrySetup; public class ManagedTopicsService { @@ -35,6 +35,8 @@ public ManagedTopicsService(Properties config) { MetricsModule.Exporter exporter = initContext.getInstance(MetricsModule.Exporter.class); log.info("Metrics exporter", "status", exporter.getStatus(), "port", exporter.getHttpPort()); + + SchemaRegistrySetup shemaRegistry = initContext.getInstance(SchemaRegistrySetup.class); RestProxySetup restProxy = initContext.getInstance(RestProxySetup.class); diff --git a/src/main/java/se/yolean/kafka/topic/client/executor/RetryingExecutorService.java b/src/main/java/se/yolean/kafka/topic/client/executor/RetryingExecutorService.java deleted file mode 100644 index c4115b1..0000000 --- a/src/main/java/se/yolean/kafka/topic/client/executor/RetryingExecutorService.java +++ /dev/null @@ -1,7 +0,0 @@ -package se.yolean.kafka.topic.client.executor; - -import java.util.concurrent.ExecutorService; - -public interface RetryingExecutorService extends ExecutorService { - -} diff --git a/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java b/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java new file mode 100644 index 0000000..484129f --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java @@ -0,0 +1,81 @@ +package se.yolean.kafka.topic.client.retryable; + +import java.time.Duration; +import java.util.Collection; +import java.util.concurrent.Callable; +import 
java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+import org.apache.kafka.clients.admin.AdminClient;
+import org.apache.kafka.clients.admin.DescribeClusterOptions;
+import org.apache.kafka.clients.admin.DescribeClusterResult;
+import org.apache.kafka.common.KafkaFuture;
+import org.apache.kafka.common.Node;
+
+import com.github.structlog4j.ILogger;
+import com.github.structlog4j.SLoggerFactory;
+
+import io.prometheus.client.Counter;
+
+public class BrokerProbe implements Callable<BrokerProbe.KafkaStatus> {
+
+  private final ILogger log = SLoggerFactory.getLogger(this.getClass());
+
+  // Should be made configurable, but let's keep them short and work on back-off
+
+  static final Counter timeouts = Counter.build().name("timeouts").labelNames("broker_probe")
+      .help("AdminClient.describeCluster timeouts").register();
+
+  @Inject
+  private AdminClient adminClient;
+
+  @Inject
+  @Named("brokers.describe.timeout")
+  private int describeTimeoutMs = 1;
+
+  @Inject
+  @Named("brokers.describe.get.timeout")
+  private int nodesTimeoutMs = 10;
+
+  @Inject
+  @Named("brokers.available.min")
+  private int brokersAvailableMin;
+
+  @Override
+  public KafkaStatus call() throws Exception {
+    DescribeClusterOptions options = new DescribeClusterOptions();
+    options.timeoutMs(describeTimeoutMs);
+    DescribeClusterResult describe = adminClient.describeCluster(options);
+
+    KafkaFuture<Collection<Node>> nodesFuture = describe.nodes();
+
+    Collection<Node> nodes = null;
+    try {
+      nodes = nodesFuture.get(nodesTimeoutMs, TimeUnit.MILLISECONDS);
+    } catch (InterruptedException e) {
+      log.error("Interrupted when waiting for controller status", e);
+    } catch (ExecutionException e) {
+      log.error("Execution error for controller status", e);
+    } catch (TimeoutException e) {
+      log.warn("Timeout waiting for controller response", "ms", nodesTimeoutMs, e);
+      timeouts.inc();
+    }
+
+    if (nodes == null) {
+      throw new Exception("No broker information available");
+    }
+    if (nodes.size() < brokersAvailableMin) {
+      throw new NotEnoughBrokersException(brokersAvailableMin, nodes.size());
+    }
+
+    return new KafkaStatus();
+  }
+
+  public static class KafkaStatus {
+  }
+
+}
diff --git a/src/main/java/se/yolean/kafka/topic/client/retryable/NotEnoughBrokersException.java b/src/main/java/se/yolean/kafka/topic/client/retryable/NotEnoughBrokersException.java
new file mode 100644
index 0000000..d2c0fc8
--- /dev/null
+++ b/src/main/java/se/yolean/kafka/topic/client/retryable/NotEnoughBrokersException.java
@@ -0,0 +1,11 @@
+package se.yolean.kafka.topic.client.retryable;
+
+public class NotEnoughBrokersException extends Exception {
+
+  private static final long serialVersionUID = 1L;
+
+  public NotEnoughBrokersException(int expected, int actual) {
+    super("Got " + actual + " brokers but at least " + expected + " is required");
+  }
+
+}
diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/RestProxySetup.java b/src/main/java/se/yolean/kafka/topic/client/retryable/RestProxySetup.java
similarity index 86%
rename from src/main/java/se/yolean/kafka/topic/client/tasks/RestProxySetup.java
rename to src/main/java/se/yolean/kafka/topic/client/retryable/RestProxySetup.java
index 0b9af63..0e9ae89 100644
--- a/src/main/java/se/yolean/kafka/topic/client/tasks/RestProxySetup.java
+++ b/src/main/java/se/yolean/kafka/topic/client/retryable/RestProxySetup.java
@@ -1,4 +1,4 @@
-package se.yolean.kafka.topic.client.tasks;
+package
se.yolean.kafka.topic.client.retryable; import com.github.structlog4j.ILogger; import com.github.structlog4j.SLoggerFactory; diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/SchemaRegistrySetup.java b/src/main/java/se/yolean/kafka/topic/client/retryable/SchemaRegistrySetup.java similarity index 86% rename from src/main/java/se/yolean/kafka/topic/client/tasks/SchemaRegistrySetup.java rename to src/main/java/se/yolean/kafka/topic/client/retryable/SchemaRegistrySetup.java index d181b17..c157069 100644 --- a/src/main/java/se/yolean/kafka/topic/client/tasks/SchemaRegistrySetup.java +++ b/src/main/java/se/yolean/kafka/topic/client/retryable/SchemaRegistrySetup.java @@ -1,4 +1,4 @@ -package se.yolean.kafka.topic.client.tasks; +package se.yolean.kafka.topic.client.retryable; import com.github.structlog4j.ILogger; import com.github.structlog4j.SLoggerFactory; diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/Task.java b/src/main/java/se/yolean/kafka/topic/client/retryable/Task.java similarity index 88% rename from src/main/java/se/yolean/kafka/topic/client/tasks/Task.java rename to src/main/java/se/yolean/kafka/topic/client/retryable/Task.java index 8dfd65a..fddbdc2 100644 --- a/src/main/java/se/yolean/kafka/topic/client/tasks/Task.java +++ b/src/main/java/se/yolean/kafka/topic/client/retryable/Task.java @@ -1,4 +1,4 @@ -package se.yolean.kafka.topic.client.tasks; +package se.yolean.kafka.topic.client.retryable; import java.util.concurrent.Callable; diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerProbe.java b/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerProbe.java deleted file mode 100644 index 0b48762..0000000 --- a/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerProbe.java +++ /dev/null @@ -1,90 +0,0 @@ -package se.yolean.kafka.topic.client.tasks; - -import java.util.Collection; -import java.util.Collections; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import javax.inject.Inject; -import javax.inject.Provider; - -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.DescribeClusterOptions; -import org.apache.kafka.clients.admin.DescribeClusterResult; -import org.apache.kafka.common.Node; - -import com.github.structlog4j.ILogger; -import com.github.structlog4j.SLoggerFactory; - -import io.prometheus.client.Counter; - -public class BrokerProbe implements Task, Provider { - - private final ILogger log = SLoggerFactory.getLogger(this.getClass()); - - // Should be made configurable, but let's keep them short and work on back-off now - private final int describeTimeoutMs = 1; // is this used, or overridden at each call? 
- private final int nodesTimeoutMs = 10; - - static final Counter timeouts = Counter.build() - .name("timeouts").labelNames("broker_probe").help("AdminClient.describeCluster timeouts").register(); - - @Inject - private AdminClient adminClient; - - @Override - public Integer call() throws Exception { - // TODO Auto-generated method stub - return null; - } - - @Override - public BrokerStatus get() { - DescribeClusterOptions options = new DescribeClusterOptions(); - options.timeoutMs(describeTimeoutMs); - DescribeClusterResult describe = adminClient.describeCluster(options); - return new BrokerStatusNotCached(describe, nodesTimeoutMs); - } - - class BrokerStatusNotCached implements BrokerStatus { - - private DescribeClusterResult describe; - private int timeoutMs; - - private BrokerStatusNotCached(DescribeClusterResult describe, int timeoutMs) { - this.describe = describe; - this.timeoutMs = timeoutMs; - } - - public Node getController() { - try { - return describe.controller().get(timeoutMs, TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - log.error("Interrupted when waiting for controller status", e); - } catch (ExecutionException e) { - log.error("Execution error for controller status", e); - } catch (TimeoutException e) { - log.warn("Timeout waiting for controller response", "ms", timeoutMs, e); - timeouts.inc(); - } - return null; - } - - public Collection getNodes() { - try { - return describe.nodes().get(timeoutMs, TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - log.error("Interrupted when waiting for controller status", e); - } catch (ExecutionException e) { - log.error("Execution error for controller status", e); - } catch (TimeoutException e) { - log.warn("Timeout waiting for controller response", "ms", timeoutMs, e); - timeouts.inc(); - } - return Collections.emptySet(); - } - - } - -} diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerStatus.java b/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerStatus.java deleted file mode 100644 index 501eca9..0000000 --- a/src/main/java/se/yolean/kafka/topic/client/tasks/BrokerStatus.java +++ /dev/null @@ -1,5 +0,0 @@ -package se.yolean.kafka.topic.client.tasks; - -public interface BrokerStatus { - -} diff --git a/src/main/java/se/yolean/kafka/topic/client/tasks/TaskRetryBackoffWrapper.java b/src/main/java/se/yolean/kafka/topic/client/tasks/TaskRetryBackoffWrapper.java deleted file mode 100644 index 595a9db..0000000 --- a/src/main/java/se/yolean/kafka/topic/client/tasks/TaskRetryBackoffWrapper.java +++ /dev/null @@ -1,29 +0,0 @@ -package se.yolean.kafka.topic.client.tasks; - -import com.evanlennick.retry4j.CallExecutor; -import com.evanlennick.retry4j.CallResults; -import com.evanlennick.retry4j.config.RetryConfig; -import com.evanlennick.retry4j.config.RetryConfigBuilder; - -public class TaskRetryBackoffWrapper implements Task { - - private RetryConfig config; - private Task task; - - public TaskRetryBackoffWrapper(Task actualTask) { - this.task = actualTask; - config = new RetryConfigBuilder() - .failOnAnyException() - .withExponentialBackoff() - .withMaxNumberOfTries(10) - .build(); - } - - @Override - public T call() throws Exception { - CallExecutor executor = new CallExecutor(config); - CallResults execute = executor.execute(task); - return execute.getResult(); - } - -} diff --git a/src/main/resources/default.properties b/src/main/resources/default.properties index 1472065..5d6c4e9 100644 --- a/src/main/resources/default.properties +++ b/src/main/resources/default.properties @@ -1 +1,4 
@@ prometheus.exporter.port=5000 +brokers.available.min=1 +brokers.describe.timeout=10 +brokers.describe.get.timeout=10 From 55439ba51b5014bdc755b45654b80121d1b38977 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Fri, 1 Dec 2017 03:55:35 +0100 Subject: [PATCH 18/23] Tests error handling in AdminClient --- build.gradle | 2 - .../kafka/topic/client/config/ItestProps.java | 12 ++- .../retryable/BrokerProbeIntegrationTest.java | 76 ++++++++++++++++++- .../topic/client/retryable/BrokerProbe.java | 35 ++++++--- src/main/resources/default.properties | 10 ++- 5 files changed, 116 insertions(+), 19 deletions(-) diff --git a/build.gradle b/build.gradle index 0862c80..c5c2db1 100644 --- a/build.gradle +++ b/build.gradle @@ -37,8 +37,6 @@ dependencies { compile group: 'io.confluent', name: 'kafka-schema-registry-client', version: '4.0.0' compile group: 'com.nurkiewicz.asyncretry', name: 'asyncretry', version: '0.0.7' - //compile group: 'com.github.rholder', name: 'guava-retrying', version: '2.0.0' - compile group: 'com.evanlennick', name: 'retry4j', version: '0.7.2' runtime group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.25' runtime group: 'org.slf4j', name: 'log4j-over-slf4j', version: '1.7.25' diff --git a/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java b/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java index 2eeb355..ef320ca 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java +++ b/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java @@ -15,8 +15,6 @@ public class ItestProps extends AbstractModule { - public static final CollectorRegistry PROMETHEUS_REGISTRY = new CollectorRegistry(); - //public static final ItestProps DOCKER_COMPOSE = new ItestProps("itest-dockercompose.properties"); public static final ItestProps DOCKER_COMPOSE = new ItestProps(new File("src/itest/resources/itest-dockercompose.properties")); @@ -50,6 +48,16 @@ public ItestProps(File itestPropertiesFile) { this.config = properties; } + public ItestProps override(String key, String value) { + this.config.setProperty(key, value); + return this; + } + + public ItestProps override(String key, int value) { + this.config.setProperty(key, Integer.toString(value)); + return this; + } + @Override protected void configure() { System.out.print("Itest props: "); diff --git a/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java b/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java index bd4cf38..36f2760 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java +++ b/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java @@ -1,6 +1,10 @@ package se.yolean.kafka.topic.client.retryable; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + import org.apache.kafka.clients.admin.AdminClient; +import org.junit.After; import org.junit.Test; import com.google.inject.AbstractModule; @@ -12,10 +16,17 @@ public class BrokerProbeIntegrationTest { + /** + * Troubleshooting issues with tests working only when run individually. 
+ */ + @After + public void waitForAdminClientClose() throws InterruptedException { + Thread.sleep(1000); + } + @Test public void test() throws Exception { Injector conf = Guice.createInjector(ItestProps.DOCKER_COMPOSE, new AbstractModule() { - @Override protected void configure() { bind(AdminClient.class).toProvider(AdminClientProvider.class); } @@ -24,4 +35,67 @@ protected void configure() { probe.call(); } + @Test + public void testTimeoutDescribeNodes() { + Injector conf = Guice.createInjector(ItestProps.DOCKER_COMPOSE.override("brokers.describe.timeout.ms", 1), + new AbstractModule() { + protected void configure() { + bind(AdminClient.class).toProvider(AdminClientProvider.class); + } + }); + BrokerProbe.timeouts.clear(); + try { + conf.getInstance(BrokerProbe.class).call(); + fail("Should have thrown exception"); + } catch (org.apache.kafka.common.errors.TimeoutException e) { + // ok, we don't wrap this unless we can also document a specific behavior + } catch (Exception e) { + fail("Should have thrown a specific exception"); + } + assertEquals(1, BrokerProbe.timeouts.labels("broker_probe").get(), 0.1); + } + + @Test + public void testTimeoutDescribeNodesGet() { + Injector conf = Guice.createInjector(ItestProps.DOCKER_COMPOSE + .override("brokers.describe.get.timeout.ms", 1), + new AbstractModule() { + protected void configure() { + bind(AdminClient.class).toProvider(AdminClientProvider.class); + } + }); + BrokerProbe.timeouts.clear(); + try { + conf.getInstance(BrokerProbe.class).call(); + fail("Should have thrown exception"); + } catch (java.util.concurrent.TimeoutException e) { + // ok, we don't wrap this unless we can also document a specific behavior + } catch (Exception e) { + fail("Should have thrown a specific exception"); + } + assertEquals(1, BrokerProbe.timeouts.labels("broker_probe").get(), 0.1); + } + + @Test + public void testBrokersNotEnough() { + Injector conf = Guice.createInjector(ItestProps.DOCKER_COMPOSE + .override("brokers.describe.available.min", 9), + new AbstractModule() { + protected void configure() { + bind(AdminClient.class).toProvider(AdminClientProvider.class); + } + }); + BrokerProbe.timeouts.clear(); + try { + conf.getInstance(BrokerProbe.class).call(); + fail("Should have thrown exception"); + } catch (NotEnoughBrokersException e) { + // good + } catch (Exception e) { + e.printStackTrace(); + fail("Should have thrown a specific exception"); + } + assertEquals(0, BrokerProbe.timeouts.labels("broker_probe").get(), 0.1); + } + } diff --git a/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java b/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java index 484129f..4797258 100644 --- a/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java +++ b/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java @@ -27,22 +27,23 @@ public class BrokerProbe implements Callable { // Should be made configurable, but let's keep them short and work on back-off - static final Counter timeouts = Counter.build().name("timeouts").labelNames("broker_probe") - .help("AdminClient.describeCluster timeouts").register(); + static final Counter timeouts = Counter.build() + .name("kafkatopics_timeouts").help("AdminClient.describeCluster timeouts") + .labelNames("broker_probe").register(); @Inject private AdminClient adminClient; @Inject - @Named("brokers.describe.timeout") - private int describeTimeoutMs = 1; + @Named("brokers.describe.timeout.ms") + private int describeTimeoutMs; @Inject - 
@Named("brokers.describe.get.timeout") - private int nodesTimeoutMs = 10; + @Named("brokers.describe.get.timeout.ms") + private int nodesTimeoutMs; @Inject - @Named("brokers.available.min") + @Named("brokers.describe.available.min") private int brokersAvailableMin; @Override @@ -57,12 +58,24 @@ public KafkaStatus call() throws Exception { try { nodes = nodesFuture.get(nodesTimeoutMs, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { - log.error("Interrupted when waiting for controller status", e); + log.error("Interrupted when waiting for nodes status", e); + throw e; } catch (ExecutionException e) { - log.error("Execution error for controller status", e); + if (e.getCause() instanceof org.apache.kafka.common.errors.TimeoutException) { + log.warn("Timeout waiting for describe nodes", "ms", describeTimeoutMs, "exception", e.getClass(), + "cause", e.getCause().getClass(), "causeMsg", e.getCause().getMessage()); + timeouts.labels("broker_probe").inc(); + throw (org.apache.kafka.common.errors.TimeoutException) e.getCause(); + } else { + log.error("Execution error for nodes status", e); + throw e; + } } catch (TimeoutException e) { - log.warn("Timeout waiting for controller response", "ms", nodesTimeoutMs, e); - timeouts.inc(); + log.warn("Timeout waiting for nodes response", "ms", nodesTimeoutMs); + timeouts.labels("broker_probe").inc(); + throw e; + } finally { + adminClient.close(); } if (nodes == null) { diff --git a/src/main/resources/default.properties b/src/main/resources/default.properties index 5d6c4e9..6f6195b 100644 --- a/src/main/resources/default.properties +++ b/src/main/resources/default.properties @@ -1,4 +1,8 @@ prometheus.exporter.port=5000 -brokers.available.min=1 -brokers.describe.timeout=10 -brokers.describe.get.timeout=10 + +# Minimum brokers available to consider startup completed +brokers.describe.available.min=1 + +# AdminClient timeouts, see BrokerProbeIntegrationTest for details +brokers.describe.timeout.ms=201 +brokers.describe.get.timeout.ms=202 From 1426314ba05cbe4fdc5cace9cc674b9c4abfdab5 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Fri, 1 Dec 2017 04:25:44 +0100 Subject: [PATCH 19/23] Reuse in tests, gotcha again --- .../kafka/topic/client/config/ItestProps.java | 56 +++++++++++-------- .../retryable/BrokerProbeIntegrationTest.java | 8 --- .../topic/client/retryable/BrokerProbe.java | 2 +- 3 files changed, 35 insertions(+), 31 deletions(-) diff --git a/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java b/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java index ef320ca..e632205 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java +++ b/src/itest/java/se/yolean/kafka/topic/client/config/ItestProps.java @@ -10,7 +10,6 @@ import com.google.inject.AbstractModule; import com.google.inject.name.Names; -import io.prometheus.client.CollectorRegistry; import se.yolean.kafka.topic.client.cli.Client; public class ItestProps extends AbstractModule { @@ -21,19 +20,49 @@ public class ItestProps extends AbstractModule { private Properties config; public ItestProps(String itestPropertiesFielnameInClasspathRoot) { + this(getItestProperties(itestPropertiesFielnameInClasspathRoot)); + } + + public ItestProps(File itestPropertiesFile) { + this(getItestProperties(itestPropertiesFile)); + } + + protected ItestProps(Properties properties) { + this.config = properties; + } + + public ItestProps override(String key, String value) { + Properties properties = new Properties(); + properties.putAll(this.config); + 
properties.setProperty(key, value); + return new ItestProps(properties); + } + + public ItestProps override(String key, int value) { + return this.override(key, Integer.toString(value)); + } + + @Override + protected void configure() { + System.out.print("Itest props: "); + this.config.list(System.out); + Names.bindProperties(super.binder(), this.config); + } + + private static Properties getItestProperties(String itestPropertiesFielnameInClasspathRoot) { Properties properties = new Properties(); try { InputStream defaultProperties = Client.class.getResourceAsStream(Client.DEFAULT_PROPERTIES_FILE); properties.load(defaultProperties); - InputStream itestProperties = this.getClass().getResourceAsStream(itestPropertiesFielnameInClasspathRoot); + InputStream itestProperties = ItestProps.class.getResourceAsStream(itestPropertiesFielnameInClasspathRoot); properties.load(itestProperties); } catch (IOException e) { throw new RuntimeException(e); } - this.config = properties; + return properties; } - public ItestProps(File itestPropertiesFile) { + private static Properties getItestProperties(File itestPropertiesFile) { Properties properties = new Properties(); try { FileReader defaults = new FileReader(new File("src/main/resources/" + Client.DEFAULT_PROPERTIES_FILE)); @@ -45,24 +74,7 @@ public ItestProps(File itestPropertiesFile) { } catch (IOException e) { throw new RuntimeException(e); } - this.config = properties; - } - - public ItestProps override(String key, String value) { - this.config.setProperty(key, value); - return this; - } - - public ItestProps override(String key, int value) { - this.config.setProperty(key, Integer.toString(value)); - return this; - } - - @Override - protected void configure() { - System.out.print("Itest props: "); - this.config.list(System.out); - Names.bindProperties(super.binder(), this.config); + return properties; } } diff --git a/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java b/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java index 36f2760..bfa778a 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java +++ b/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java @@ -16,14 +16,6 @@ public class BrokerProbeIntegrationTest { - /** - * Troubleshooting issues with tests working only when run individually. 
- */ - @After - public void waitForAdminClientClose() throws InterruptedException { - Thread.sleep(1000); - } - @Test public void test() throws Exception { Injector conf = Guice.createInjector(ItestProps.DOCKER_COMPOSE, new AbstractModule() { diff --git a/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java b/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java index 4797258..25db0d2 100644 --- a/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java +++ b/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java @@ -75,7 +75,7 @@ public KafkaStatus call() throws Exception { timeouts.labels("broker_probe").inc(); throw e; } finally { - adminClient.close(); + adminClient.close(); // depends on provider impl } if (nodes == null) { From 88be59c9537f7873d33ccf9ec5c1e01c02151b04 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Fri, 1 Dec 2017 04:58:55 +0100 Subject: [PATCH 20/23] Got some sort of structure for integrating different services now --- .../client/cli/ManagedTopicsServiceTest.java | 13 +++++++ .../ClusterDescriptionProviderTest.java | 34 ----------------- .../retryable/BrokerProbeIntegrationTest.java | 3 +- .../yolean/kafka/topic/client/cli/Client.java | 2 +- .../client/cli/ManagedTopicsService.java | 37 ++++++++++++++----- .../AdminClientProvider.java | 2 +- .../client/config/CallExecutorProvider.java | 19 ---------- .../config/ClusterDescriptionProvider.java | 37 ------------------- .../config/ExecutorRetryProviderForInit.java | 1 - .../client/config/ManagerConfigModule.java | 2 - .../topic/client/retryable/BrokerProbe.java | 1 - .../client/retryable/RestProxySetup.java | 12 +++++- .../client/retryable/SchemaRegistrySetup.java | 12 ++++-- 13 files changed, 62 insertions(+), 113 deletions(-) create mode 100644 src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java delete mode 100644 src/itest/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProviderTest.java rename src/main/java/se/yolean/kafka/topic/client/{service => config}/AdminClientProvider.java (94%) delete mode 100644 src/main/java/se/yolean/kafka/topic/client/config/CallExecutorProvider.java delete mode 100644 src/main/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProvider.java diff --git a/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java b/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java new file mode 100644 index 0000000..faea413 --- /dev/null +++ b/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java @@ -0,0 +1,13 @@ +package se.yolean.kafka.topic.client.cli; + +import static org.junit.Assert.*; + +import org.junit.Test; + +public class ManagedTopicsServiceTest { + + @Test + public void testManagedTopicsService() { + } + +} diff --git a/src/itest/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProviderTest.java b/src/itest/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProviderTest.java deleted file mode 100644 index b8a4950..0000000 --- a/src/itest/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProviderTest.java +++ /dev/null @@ -1,34 +0,0 @@ -package se.yolean.kafka.topic.client.config; - -import static org.junit.Assert.*; - -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; - -public class ClusterDescriptionProviderTest { - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - } - - 
@AfterClass - public static void tearDownAfterClass() throws Exception { - } - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGet() { - - } - -} diff --git a/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java b/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java index bfa778a..402625e 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java +++ b/src/itest/java/se/yolean/kafka/topic/client/retryable/BrokerProbeIntegrationTest.java @@ -4,15 +4,14 @@ import static org.junit.Assert.fail; import org.apache.kafka.clients.admin.AdminClient; -import org.junit.After; import org.junit.Test; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Injector; +import se.yolean.kafka.topic.client.config.AdminClientProvider; import se.yolean.kafka.topic.client.config.ItestProps; -import se.yolean.kafka.topic.client.service.AdminClientProvider; public class BrokerProbeIntegrationTest { diff --git a/src/main/java/se/yolean/kafka/topic/client/cli/Client.java b/src/main/java/se/yolean/kafka/topic/client/cli/Client.java index f5e6dba..69449a9 100644 --- a/src/main/java/se/yolean/kafka/topic/client/cli/Client.java +++ b/src/main/java/se/yolean/kafka/topic/client/cli/Client.java @@ -1,5 +1,5 @@ package se.yolean.kafka.topic.client.cli; -import kafka.admin.AdminOperationException; + import org.I0Itec.zkclient.ZkClient; import org.I0Itec.zkclient.ZkConnection; diff --git a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java index d2285fc..b71dd72 100644 --- a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java +++ b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java @@ -1,13 +1,10 @@ package se.yolean.kafka.topic.client.cli; -import java.io.PrintWriter; -import java.io.StringWriter; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Properties; -import java.util.concurrent.ExecutorService; +import java.util.concurrent.CompletableFuture; -import com.fasterxml.jackson.databind.util.Annotations; import com.github.structlog4j.ILogger; import com.github.structlog4j.SLoggerFactory; import com.google.inject.Guice; @@ -16,9 +13,12 @@ import se.yolean.kafka.topic.client.config.ManagerConfigModule; import se.yolean.kafka.topic.client.config.MetricsModule; +import se.yolean.kafka.topic.client.retryable.BrokerProbe; +import se.yolean.kafka.topic.client.retryable.BrokerProbe.KafkaStatus; import se.yolean.kafka.topic.client.retryable.RestProxySetup; +import se.yolean.kafka.topic.client.retryable.RestProxySetup.EndpointsStatus; import se.yolean.kafka.topic.client.retryable.SchemaRegistrySetup; -import se.yolean.kafka.topic.client.config.ExecutorRetryProviderForInit; +import se.yolean.kafka.topic.client.retryable.SchemaRegistrySetup.AdminSchemaStatus; public class ManagedTopicsService { @@ -35,15 +35,32 @@ public ManagedTopicsService(Properties config) { MetricsModule.Exporter exporter = initContext.getInstance(MetricsModule.Exporter.class); log.info("Metrics exporter", "status", exporter.getStatus(), "port", exporter.getHttpPort()); + final RetryExecutor tasks = initContext.getInstance(RetryExecutor.class); + BrokerProbe brokerProbe = initContext.getInstance(BrokerProbe.class); - SchemaRegistrySetup shemaRegistry = 
initContext.getInstance(SchemaRegistrySetup.class); + // How to execute a task depends on concurrency ambitions, + // with plain Kafka API impls actually more suitable for a dedicated thread + // and long configured timeouts in this service. + // On the other hand, short timeouts (aborting Kafka clients' own retry+backoff) + // enables concurrency with other tasks such as REST-based + CompletableFuture brokers = tasks.getWithRetry(brokerProbe); - RestProxySetup restProxy = initContext.getInstance(RestProxySetup.class); + SchemaRegistrySetup schemaRegistry = initContext.getInstance(SchemaRegistrySetup.class); + CompletableFuture schemas = tasks.getWithRetry(schemaRegistry); - initContext.getInstance(ExecutorRetryProviderForInit.class); - RetryExecutor ex = null; - ex.getWithRetry(() -> new String()); + brokers.thenAcceptBoth(schemas, (KafkaStatus s, AdminSchemaStatus i) -> { + + log.info("Both kafka and schema registry is ok, now create REST producer for declarations"); + RestProxySetup restProxy = initContext.getInstance(RestProxySetup.class); + + CompletableFuture rest = tasks.getWithRetry(restProxy); + rest.thenAccept(endpoints -> { + log.info("REST endpoints also OK, let's start consuming topic declarations"); + log.warn("Big fat TODO"); + }); + + }); } String getHostname() { diff --git a/src/main/java/se/yolean/kafka/topic/client/service/AdminClientProvider.java b/src/main/java/se/yolean/kafka/topic/client/config/AdminClientProvider.java similarity index 94% rename from src/main/java/se/yolean/kafka/topic/client/service/AdminClientProvider.java rename to src/main/java/se/yolean/kafka/topic/client/config/AdminClientProvider.java index 00b8261..cc7ea38 100644 --- a/src/main/java/se/yolean/kafka/topic/client/service/AdminClientProvider.java +++ b/src/main/java/se/yolean/kafka/topic/client/config/AdminClientProvider.java @@ -1,4 +1,4 @@ -package se.yolean.kafka.topic.client.service; +package se.yolean.kafka.topic.client.config; import java.util.Properties; diff --git a/src/main/java/se/yolean/kafka/topic/client/config/CallExecutorProvider.java b/src/main/java/se/yolean/kafka/topic/client/config/CallExecutorProvider.java deleted file mode 100644 index da8f118..0000000 --- a/src/main/java/se/yolean/kafka/topic/client/config/CallExecutorProvider.java +++ /dev/null @@ -1,19 +0,0 @@ -package se.yolean.kafka.topic.client.config; - -import javax.inject.Provider; - -import com.evanlennick.retry4j.CallExecutor; - -public class CallExecutorProvider implements Provider { - - public CallExecutorProvider() { - // TODO Auto-generated constructor stub - } - - @Override - public CallExecutor get() { - // TODO Auto-generated method stub - return null; - } - -} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProvider.java b/src/main/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProvider.java deleted file mode 100644 index 7c65d28..0000000 --- a/src/main/java/se/yolean/kafka/topic/client/config/ClusterDescriptionProvider.java +++ /dev/null @@ -1,37 +0,0 @@ -package se.yolean.kafka.topic.client.config; - -import java.util.Properties; -import java.util.Set; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import javax.inject.Inject; -import javax.inject.Named; - -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.AdminClientConfig; -import org.apache.kafka.clients.admin.DescribeClusterResult; - -import 
se.yolean.kafka.topic.client.service.StoreInitializationException; - -public class ClusterDescriptionProvider { - - private String bootstrap; - - @Inject - public ClusterDescriptionProvider(@Named("config:bootstrap") String bootstrap) { - this.bootstrap = bootstrap; - } - - public void get() { - Properties props = new Properties(); - props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrap); - - AdminClient admin = AdminClient.create(props); - - DescribeClusterResult describeCluster = admin.describeCluster(); - - } - -} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java index 5710caa..b2cb11e 100644 --- a/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java +++ b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java @@ -1,6 +1,5 @@ package se.yolean.kafka.topic.client.config; -import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java b/src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java index 14920e3..6eb7f0e 100644 --- a/src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java +++ b/src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java @@ -3,8 +3,6 @@ import java.io.PrintWriter; import java.io.StringWriter; import java.util.Properties; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import com.github.structlog4j.ILogger; diff --git a/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java b/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java index 25db0d2..1f1ab81 100644 --- a/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java +++ b/src/main/java/se/yolean/kafka/topic/client/retryable/BrokerProbe.java @@ -1,6 +1,5 @@ package se.yolean.kafka.topic.client.retryable; -import java.time.Duration; import java.util.Collection; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; diff --git a/src/main/java/se/yolean/kafka/topic/client/retryable/RestProxySetup.java b/src/main/java/se/yolean/kafka/topic/client/retryable/RestProxySetup.java index 0e9ae89..ae40f74 100644 --- a/src/main/java/se/yolean/kafka/topic/client/retryable/RestProxySetup.java +++ b/src/main/java/se/yolean/kafka/topic/client/retryable/RestProxySetup.java @@ -1,15 +1,23 @@ package se.yolean.kafka.topic.client.retryable; +import java.util.concurrent.Callable; + import com.github.structlog4j.ILogger; import com.github.structlog4j.SLoggerFactory; -public class RestProxySetup implements Runnable { +public class RestProxySetup implements Callable { private final ILogger log = SLoggerFactory.getLogger(this.getClass()); @Override - public void run() { + public EndpointsStatus call() throws Exception { log.warn("TODO set up REST endpoint for topic creation"); + return new EndpointsStatus(); + } + + public static class EndpointsStatus { } + + } diff --git a/src/main/java/se/yolean/kafka/topic/client/retryable/SchemaRegistrySetup.java b/src/main/java/se/yolean/kafka/topic/client/retryable/SchemaRegistrySetup.java index c157069..2ed2c8c 100644 --- a/src/main/java/se/yolean/kafka/topic/client/retryable/SchemaRegistrySetup.java +++ 
b/src/main/java/se/yolean/kafka/topic/client/retryable/SchemaRegistrySetup.java @@ -1,15 +1,21 @@ package se.yolean.kafka.topic.client.retryable; +import java.util.concurrent.Callable; + import com.github.structlog4j.ILogger; import com.github.structlog4j.SLoggerFactory; -public class SchemaRegistrySetup implements Runnable { +public class SchemaRegistrySetup implements Callable { private final ILogger log = SLoggerFactory.getLogger(this.getClass()); @Override - public void run() { - log.warn("TODO set up REST endpoint for topic creation"); + public AdminSchemaStatus call() throws Exception { + log.warn("TODO idempotent conf of admin schema"); + return new AdminSchemaStatus(); + } + + public static class AdminSchemaStatus { } } From 790363469e91038b3bd83f9b66c40c87d988d1bb Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Fri, 1 Dec 2017 06:09:41 +0100 Subject: [PATCH 21/23] Wanted to start, but more importantly stop, the service from a test ... but failed. --- .../client/cli/ManagedTopicsServiceTest.java | 27 ++++++++++++- .../yolean/kafka/topic/client/cli/Client.java | 4 +- .../client/cli/ManagedTopicsService.java | 40 ++++++++++++++++--- .../client/config/ConcurrencyModule.java | 16 ++++++++ ...gerConfigModule.java => ConfigModule.java} | 13 +++--- .../config/ExecutorServiceProvider.java | 2 - .../client/config/ManagerInitModule.java | 14 +++++++ .../topic/client/config/MetricsModule.java | 1 - ...gModuleTest.java => ConfigModuleTest.java} | 4 +- 9 files changed, 100 insertions(+), 21 deletions(-) create mode 100644 src/main/java/se/yolean/kafka/topic/client/config/ConcurrencyModule.java rename src/main/java/se/yolean/kafka/topic/client/config/{ManagerConfigModule.java => ConfigModule.java} (60%) create mode 100644 src/main/java/se/yolean/kafka/topic/client/config/ManagerInitModule.java rename src/test/java/se/yolean/kafka/topic/client/config/{ManagerConfigModuleTest.java => ConfigModuleTest.java} (85%) diff --git a/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java b/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java index faea413..5f98e57 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java +++ b/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java @@ -1,13 +1,38 @@ package se.yolean.kafka.topic.client.cli; -import static org.junit.Assert.*; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.io.Reader; +import java.util.Properties; import org.junit.Test; public class ManagedTopicsServiceTest { + private Properties getProperties(String... 
props) { + Properties config = new Properties(); + for (String path : props) { + try { + Reader source = new FileReader(path); + config.load(source); + } catch (FileNotFoundException e) { + throw new RuntimeException(path, e); + } catch (IOException e) { + throw new RuntimeException(path, e); + } + } + return config; + } + @Test public void testManagedTopicsService() { + Properties config = getProperties( + "src/main/resources/default.properties", + "src/itest/resources/itest-dockercompose.properties" + ); + ManagedTopicsService service = new ManagedTopicsService(config); + service.start(); } } diff --git a/src/main/java/se/yolean/kafka/topic/client/cli/Client.java b/src/main/java/se/yolean/kafka/topic/client/cli/Client.java index 69449a9..86283be 100644 --- a/src/main/java/se/yolean/kafka/topic/client/cli/Client.java +++ b/src/main/java/se/yolean/kafka/topic/client/cli/Client.java @@ -35,7 +35,7 @@ static ClassLoader getClassLoaderForDefaults() { return Client.class.getClassLoader(); } - static ManagedTopicsService managerStart(String managerPropertiesPath) { + static void managerStart(String managerPropertiesPath) { Properties properties = new Properties(); InputStream defaultProperties = getClassLoaderForDefaults().getResourceAsStream(DEFAULT_PROPERTIES_FILE); if (defaultProperties == null) { @@ -66,7 +66,7 @@ static ManagedTopicsService managerStart(String managerPropertiesPath) { throw new RuntimeException("Failed to read properties file " + managerPropertiesPath, e); } - return new ManagedTopicsService(properties); + new ManagedTopicsService(properties).start(); } public static void main(String[] args) throws Exception { diff --git a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java index b71dd72..b8bf5f4 100644 --- a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java +++ b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java @@ -4,6 +4,8 @@ import java.net.UnknownHostException; import java.util.Properties; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; import com.github.structlog4j.ILogger; import com.github.structlog4j.SLoggerFactory; @@ -11,7 +13,9 @@ import com.google.inject.Injector; import com.nurkiewicz.asyncretry.RetryExecutor; -import se.yolean.kafka.topic.client.config.ManagerConfigModule; +import se.yolean.kafka.topic.client.config.ConcurrencyModule; +import se.yolean.kafka.topic.client.config.ConfigModule; +import se.yolean.kafka.topic.client.config.ManagerInitModule; import se.yolean.kafka.topic.client.config.MetricsModule; import se.yolean.kafka.topic.client.retryable.BrokerProbe; import se.yolean.kafka.topic.client.retryable.BrokerProbe.KafkaStatus; @@ -20,15 +24,31 @@ import se.yolean.kafka.topic.client.retryable.SchemaRegistrySetup; import se.yolean.kafka.topic.client.retryable.SchemaRegistrySetup.AdminSchemaStatus; -public class ManagedTopicsService { +public class ManagedTopicsService implements Runnable { public final ILogger log = SLoggerFactory.getLogger(this.getClass()); + private final Injector serviceContext; + public ManagedTopicsService(Properties config) { - log.info("Starting Topic Manager Service", "hostname", getHostname()); + serviceContext = Guice.createInjector(new ConfigModule(config), new ConcurrencyModule()); + } - Injector initContext = Guice.createInjector( - new ManagerConfigModule(config), + public void start() { 
+ //log.info("Starting Topic Manager Service", "hostname", getHostname()); + run(); + } + + public void stop() { + log.warn("TODO shutdown not implemented"); + } + + @Override + public void run() { + log.info("Running Topic Manager Service"); + + Injector initContext = serviceContext.createChildInjector( + new ManagerInitModule(), new MetricsModule() ); @@ -61,6 +81,16 @@ public ManagedTopicsService(Properties config) { }); }); + + while (true) { + // we need to wait for the management loop here, but it can't start until the above has completed + log.debug("Somewhere here we'll be repeating the topic management loop"); + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + log.info("Exiting"); + } + } } String getHostname() { diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ConcurrencyModule.java b/src/main/java/se/yolean/kafka/topic/client/config/ConcurrencyModule.java new file mode 100644 index 0000000..55ef181 --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/config/ConcurrencyModule.java @@ -0,0 +1,16 @@ +package se.yolean.kafka.topic.client.config; + +import java.util.concurrent.ScheduledExecutorService; + +import com.google.inject.AbstractModule; +import com.nurkiewicz.asyncretry.RetryExecutor; + +public class ConcurrencyModule extends AbstractModule { + + @Override + protected void configure() { + bind(ScheduledExecutorService.class).toProvider(ExecutorServiceProvider.class); + bind(RetryExecutor.class).toProvider(ExecutorRetryProviderForInit.class); + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java b/src/main/java/se/yolean/kafka/topic/client/config/ConfigModule.java similarity index 60% rename from src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java rename to src/main/java/se/yolean/kafka/topic/client/config/ConfigModule.java index 6eb7f0e..4244ef4 100644 --- a/src/main/java/se/yolean/kafka/topic/client/config/ManagerConfigModule.java +++ b/src/main/java/se/yolean/kafka/topic/client/config/ConfigModule.java @@ -3,21 +3,21 @@ import java.io.PrintWriter; import java.io.StringWriter; import java.util.Properties; -import java.util.concurrent.ScheduledExecutorService; + +import org.apache.kafka.clients.admin.AdminClient; import com.github.structlog4j.ILogger; import com.github.structlog4j.SLoggerFactory; import com.google.inject.AbstractModule; import com.google.inject.name.Names; -import com.nurkiewicz.asyncretry.RetryExecutor; -public class ManagerConfigModule extends AbstractModule { +public class ConfigModule extends AbstractModule { private final ILogger log = SLoggerFactory.getLogger(this.getClass()); private Properties config; - public ManagerConfigModule(Properties config) { + public ConfigModule(Properties config) { this.config = config; logConfigValues(); } @@ -25,15 +25,12 @@ public ManagerConfigModule(Properties config) { void logConfigValues() { StringWriter writer = new StringWriter(); config.list(new PrintWriter(writer)); - log.info("Topic Manager config: " + writer.getBuffer().toString()); + log.info("Instance config: " + writer.getBuffer().toString()); } @Override protected void configure() { Names.bindProperties(super.binder(), this.config); - - bind(ScheduledExecutorService.class).toProvider(ExecutorServiceProvider.class); - bind(RetryExecutor.class).toProvider(ExecutorRetryProviderForInit.class); } } diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ExecutorServiceProvider.java 
b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorServiceProvider.java index 4741084..e6326c4 100644 --- a/src/main/java/se/yolean/kafka/topic/client/config/ExecutorServiceProvider.java +++ b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorServiceProvider.java @@ -19,8 +19,6 @@ public ScheduledExecutorService get() { if (shared == null) { log.info("Creating new executor"); shared = Executors.newSingleThreadScheduledExecutor(); - } else { - log.warn("Reusing shared executor instance"); } return shared; } diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ManagerInitModule.java b/src/main/java/se/yolean/kafka/topic/client/config/ManagerInitModule.java new file mode 100644 index 0000000..db778ee --- /dev/null +++ b/src/main/java/se/yolean/kafka/topic/client/config/ManagerInitModule.java @@ -0,0 +1,14 @@ +package se.yolean.kafka.topic.client.config; + +import org.apache.kafka.clients.admin.AdminClient; + +import com.google.inject.AbstractModule; + +public class ManagerInitModule extends AbstractModule { + + @Override + protected void configure() { + bind(AdminClient.class).toProvider(AdminClientProvider.class); + } + +} diff --git a/src/main/java/se/yolean/kafka/topic/client/config/MetricsModule.java b/src/main/java/se/yolean/kafka/topic/client/config/MetricsModule.java index 24c70fd..6d3a3b7 100644 --- a/src/main/java/se/yolean/kafka/topic/client/config/MetricsModule.java +++ b/src/main/java/se/yolean/kafka/topic/client/config/MetricsModule.java @@ -29,7 +29,6 @@ public Exporter get() { } catch (IOException e) { throw new RuntimeException("Failed to start metrics exporter on port " + port, e); } - bind(HTTPServer.class).toInstance(server); return new Exporter() { diff --git a/src/test/java/se/yolean/kafka/topic/client/config/ManagerConfigModuleTest.java b/src/test/java/se/yolean/kafka/topic/client/config/ConfigModuleTest.java similarity index 85% rename from src/test/java/se/yolean/kafka/topic/client/config/ManagerConfigModuleTest.java rename to src/test/java/se/yolean/kafka/topic/client/config/ConfigModuleTest.java index 0ac0713..66cbbb5 100644 --- a/src/test/java/se/yolean/kafka/topic/client/config/ManagerConfigModuleTest.java +++ b/src/test/java/se/yolean/kafka/topic/client/config/ConfigModuleTest.java @@ -12,13 +12,13 @@ import com.google.inject.Guice; import com.google.inject.Injector; -public class ManagerConfigModuleTest { +public class ConfigModuleTest { @Test public void test() { Properties props = new Properties(); props.setProperty("bootstrap.servers", "PLAINTEXT://my-test-value:9092"); - Injector injector = Guice.createInjector(new ManagerConfigModule(props)); + Injector injector = Guice.createInjector(new ConfigModule(props)); TestService1 t1 = injector.getInstance(TestService1.class); assertEquals("PLAINTEXT://my-test-value:9092", t1.boostrapServers); } From e0057a2fcbd9793ef53a6b0069d510f8de6663d5 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Fri, 1 Dec 2017 08:17:53 +0100 Subject: [PATCH 22/23] Shares the executor service with the retry lib --- .../topic/client/cli/ManagedTopicsService.java | 4 ++-- .../config/ExecutorRetryProviderForInit.java | 14 ++++++++++---- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java index b8bf5f4..91e033c 100644 --- a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java +++ 
b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java @@ -11,7 +11,7 @@ import com.github.structlog4j.SLoggerFactory; import com.google.inject.Guice; import com.google.inject.Injector; -import com.nurkiewicz.asyncretry.RetryExecutor; +import com.nurkiewicz.asyncretry.AsyncRetryExecutor; import se.yolean.kafka.topic.client.config.ConcurrencyModule; import se.yolean.kafka.topic.client.config.ConfigModule; @@ -55,7 +55,7 @@ public void run() { MetricsModule.Exporter exporter = initContext.getInstance(MetricsModule.Exporter.class); log.info("Metrics exporter", "status", exporter.getStatus(), "port", exporter.getHttpPort()); - final RetryExecutor tasks = initContext.getInstance(RetryExecutor.class); + final AsyncRetryExecutor tasks = initContext.getInstance(AsyncRetryExecutor.class); BrokerProbe brokerProbe = initContext.getInstance(BrokerProbe.class); diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java index b2cb11e..46682b6 100644 --- a/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java +++ b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java @@ -3,16 +3,22 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; +import javax.inject.Inject; import javax.inject.Provider; import com.nurkiewicz.asyncretry.AsyncRetryExecutor; -import com.nurkiewicz.asyncretry.RetryExecutor; -public class ExecutorRetryProviderForInit implements Provider { +public class ExecutorRetryProviderForInit implements Provider { + + private ScheduledExecutorService concurrency; + + @Inject + public ExecutorRetryProviderForInit(ScheduledExecutorService concurrency) { + this.concurrency = concurrency; + } @Override - public RetryExecutor get() { - ScheduledExecutorService concurrency = Executors.newSingleThreadScheduledExecutor(); + public AsyncRetryExecutor get() { AsyncRetryExecutor executor = new AsyncRetryExecutor(concurrency) //.retryOn(Throwable.class) .withExponentialBackoff(500, 2) //500ms times 2 after each retry From 1fb1e0bfc678291bc56ee21d966d38ee4aba93d5 Mon Sep 17 00:00:00 2001 From: Staffan Olsson Date: Fri, 1 Dec 2017 08:58:46 +0100 Subject: [PATCH 23/23] Runs in sequence for now, until we're more than a consumer --- .../client/cli/ManagedTopicsServiceTest.java | 1 + .../client/cli/ManagedTopicsService.java | 75 +++++++++---------- .../client/config/ConcurrencyModule.java | 10 ++- .../config/ExecutorRetryProviderForInit.java | 31 -------- .../kafka/topic/client/retryable/Task.java | 15 ---- .../client/retryable/TopicCreateOrVerify.java | 7 ++ .../retryable/TopicOperationResult.java | 5 ++ .../TopicDeclarationsPollModule.java | 24 ++++++ .../topic/manager/tt/TopicsTopicWatcher.java | 29 +++++++ src/main/resources/default.properties | 7 +- 10 files changed, 115 insertions(+), 89 deletions(-) delete mode 100644 src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java delete mode 100644 src/main/java/se/yolean/kafka/topic/client/retryable/Task.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/retryable/TopicCreateOrVerify.java create mode 100644 src/main/java/se/yolean/kafka/topic/client/retryable/TopicOperationResult.java create mode 100644 src/main/java/se/yolean/kafka/topic/manager/configure/TopicDeclarationsPollModule.java create mode 100644 src/main/java/se/yolean/kafka/topic/manager/tt/TopicsTopicWatcher.java 
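Note (not part of the patch series): the diff below drops the asyncretry wiring and runs BrokerProbe, SchemaRegistrySetup and RestProxySetup strictly in sequence, leaving retry behaviour to long Kafka client timeouts for now. If the plain-retry idea kept as a comment in ConcurrencyModule is picked up later, a helper roughly like the sketch below could wrap the sequential call()s; SimpleRetry and callWithBackoff are illustrative names only and do not exist in this repository.

import java.util.concurrent.Callable;

public class SimpleRetry {

  // Illustrative sketch, not repository code: retries a Callable, sleeping with
  // exponential back-off between attempts. Blocking and single-threaded on purpose,
  // matching the sequential style of this patch.
  public static <T> T callWithBackoff(Callable<T> task, int maxTries, long initialDelayMs) throws Exception {
    if (maxTries < 1) {
      throw new IllegalArgumentException("maxTries must be at least 1");
    }
    long delayMs = initialDelayMs;
    Exception last = null;
    for (int attempt = 1; attempt <= maxTries; attempt++) {
      try {
        return task.call();
      } catch (Exception e) {
        last = e;
        if (attempt < maxTries) {
          Thread.sleep(delayMs);
          delayMs = delayMs * 2; // double the delay after each failed attempt
        }
      }
    }
    throw last;
  }
}

The setup steps in run() could then be expressed as, for example, SimpleRetry.callWithBackoff(brokerProbe, 10, 500) instead of the bare brokerProbe.call() with a RuntimeException wrapper.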
diff --git a/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java b/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java index 5f98e57..7409bcd 100644 --- a/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java +++ b/src/itest/java/se/yolean/kafka/topic/client/cli/ManagedTopicsServiceTest.java @@ -31,6 +31,7 @@ public void testManagedTopicsService() { "src/main/resources/default.properties", "src/itest/resources/itest-dockercompose.properties" ); + config.setProperty("topic.declarations.consumer.polls.max", Integer.toString(3)); ManagedTopicsService service = new ManagedTopicsService(config); service.start(); } diff --git a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java index 91e033c..5a54005 100644 --- a/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java +++ b/src/main/java/se/yolean/kafka/topic/client/cli/ManagedTopicsService.java @@ -3,26 +3,20 @@ import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Properties; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; import com.github.structlog4j.ILogger; import com.github.structlog4j.SLoggerFactory; import com.google.inject.Guice; import com.google.inject.Injector; -import com.nurkiewicz.asyncretry.AsyncRetryExecutor; -import se.yolean.kafka.topic.client.config.ConcurrencyModule; import se.yolean.kafka.topic.client.config.ConfigModule; import se.yolean.kafka.topic.client.config.ManagerInitModule; import se.yolean.kafka.topic.client.config.MetricsModule; import se.yolean.kafka.topic.client.retryable.BrokerProbe; -import se.yolean.kafka.topic.client.retryable.BrokerProbe.KafkaStatus; import se.yolean.kafka.topic.client.retryable.RestProxySetup; -import se.yolean.kafka.topic.client.retryable.RestProxySetup.EndpointsStatus; import se.yolean.kafka.topic.client.retryable.SchemaRegistrySetup; -import se.yolean.kafka.topic.client.retryable.SchemaRegistrySetup.AdminSchemaStatus; +import se.yolean.kafka.topic.manager.configure.TopicDeclarationsPollModule; +import se.yolean.kafka.topic.manager.tt.TopicsTopicWatcher; public class ManagedTopicsService implements Runnable { @@ -31,16 +25,20 @@ public class ManagedTopicsService implements Runnable { private final Injector serviceContext; public ManagedTopicsService(Properties config) { - serviceContext = Guice.createInjector(new ConfigModule(config), new ConcurrencyModule()); + serviceContext = Guice.createInjector( + // ny async or retry behavior now, so configure long timeouts instead + //new ConcurrencyModule(), + new ConfigModule(config) + ); } public void start() { - //log.info("Starting Topic Manager Service", "hostname", getHostname()); + log.info("Starting Topic Manager Service without concurrency", "hostname", getHostname()); run(); } public void stop() { - log.warn("TODO shutdown not implemented"); + log.warn("TODO shutdown not implemented. 
Send termination signals or configure topic.declarations.consumer.polls.max."); } @Override @@ -55,42 +53,41 @@ public void run() { MetricsModule.Exporter exporter = initContext.getInstance(MetricsModule.Exporter.class); log.info("Metrics exporter", "status", exporter.getStatus(), "port", exporter.getHttpPort()); - final AsyncRetryExecutor tasks = initContext.getInstance(AsyncRetryExecutor.class); - BrokerProbe brokerProbe = initContext.getInstance(BrokerProbe.class); - - // How to execute a task depends on concurrency ambitions, - // with plain Kafka API impls actually more suitable for a dedicated thread - // and long configured timeouts in this service. - // On the other hand, short timeouts (aborting Kafka clients' own retry+backoff) - // enables concurrency with other tasks such as REST-based - CompletableFuture brokers = tasks.getWithRetry(brokerProbe); + BrokerProbe.KafkaStatus status; + try { + status = brokerProbe.call(); + } catch (Exception e) { + throw new RuntimeException("unhandled", e); + } SchemaRegistrySetup schemaRegistry = initContext.getInstance(SchemaRegistrySetup.class); - CompletableFuture schemas = tasks.getWithRetry(schemaRegistry); + SchemaRegistrySetup.AdminSchemaStatus schemas; + try { + schemas = schemaRegistry.call(); + } catch (Exception e) { + throw new RuntimeException("unhandled", e); + } - brokers.thenAcceptBoth(schemas, (KafkaStatus s, AdminSchemaStatus i) -> { + log.info("Both kafka and schema registry is ok, now create REST producer for declarations"); + RestProxySetup restProxy = initContext.getInstance(RestProxySetup.class); - log.info("Both kafka and schema registry is ok, now create REST producer for declarations"); - RestProxySetup restProxy = initContext.getInstance(RestProxySetup.class); + RestProxySetup.EndpointsStatus rest; + try { + rest = restProxy.call(); + } catch (Exception e) { + throw new RuntimeException("unhandled", e); + } - CompletableFuture rest = tasks.getWithRetry(restProxy); - rest.thenAccept(endpoints -> { - log.info("REST endpoints also OK, let's start consuming topic declarations"); - log.warn("Big fat TODO"); - }); + log.info("REST endpoints also OK, let's start consuming topic declarations"); - }); + Injector managerContext = initContext.createChildInjector( + new TopicDeclarationsPollModule(status, schemas, rest)); - while (true) { - // we need to wait for the management loop here, but it can't start until the above has completed - log.debug("Somewhere here we'll be repeating the topic management loop"); - try { - Thread.sleep(5000); - } catch (InterruptedException e) { - log.info("Exiting"); - } - } + + TopicsTopicWatcher watch = managerContext.getInstance(TopicsTopicWatcher.class); + log.info("Handing control over to topic declarations poll loop", "impl", watch.getClass()); + watch.run(); } String getHostname() { diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ConcurrencyModule.java b/src/main/java/se/yolean/kafka/topic/client/config/ConcurrencyModule.java index 55ef181..8dbfb0d 100644 --- a/src/main/java/se/yolean/kafka/topic/client/config/ConcurrencyModule.java +++ b/src/main/java/se/yolean/kafka/topic/client/config/ConcurrencyModule.java @@ -3,14 +3,20 @@ import java.util.concurrent.ScheduledExecutorService; import com.google.inject.AbstractModule; -import com.nurkiewicz.asyncretry.RetryExecutor; public class ConcurrencyModule extends AbstractModule { @Override protected void configure() { + // Using Scheduled because the retry lib depend(s|ed) on it 
bind(ScheduledExecutorService.class).toProvider(ExecutorServiceProvider.class); - bind(RetryExecutor.class).toProvider(ExecutorRetryProviderForInit.class); + + // I'm not so sure we should use this lib... + // Our callables have quite different characteristics, + // with kafka libs doing back-off within given timeouts. + // And we can probably do simple retries, while managing concurrency as suggested in + // http://winterbe.com/posts/2015/04/07/java8-concurrency-tutorial-thread-executor-examples/ + //bind(com.nurkiewicz.asyncretry.AsyncRetryExecutor.class).toProvider(ExecutorRetryProviderForInit.class); } } diff --git a/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java b/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java deleted file mode 100644 index 46682b6..0000000 --- a/src/main/java/se/yolean/kafka/topic/client/config/ExecutorRetryProviderForInit.java +++ /dev/null @@ -1,31 +0,0 @@ -package se.yolean.kafka.topic.client.config; - -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; - -import javax.inject.Inject; -import javax.inject.Provider; - -import com.nurkiewicz.asyncretry.AsyncRetryExecutor; - -public class ExecutorRetryProviderForInit implements Provider { - - private ScheduledExecutorService concurrency; - - @Inject - public ExecutorRetryProviderForInit(ScheduledExecutorService concurrency) { - this.concurrency = concurrency; - } - - @Override - public AsyncRetryExecutor get() { - AsyncRetryExecutor executor = new AsyncRetryExecutor(concurrency) - //.retryOn(Throwable.class) - .withExponentialBackoff(500, 2) //500ms times 2 after each retry - .withMaxDelay(10_000) //10 seconds - .withUniformJitter() //add between +/- 100 ms randomly - .withMaxRetries(20); - return executor; - } - -} diff --git a/src/main/java/se/yolean/kafka/topic/client/retryable/Task.java b/src/main/java/se/yolean/kafka/topic/client/retryable/Task.java deleted file mode 100644 index fddbdc2..0000000 --- a/src/main/java/se/yolean/kafka/topic/client/retryable/Task.java +++ /dev/null @@ -1,15 +0,0 @@ -package se.yolean.kafka.topic.client.retryable; - -import java.util.concurrent.Callable; - -/** - * Anything that might need retries or benefit from concurrency - * would have to be wrapped anyway, so let's have this task abstraction. - * - * @param The type returned by the actual {@link Callable} - * - * @deprecated Use {@link Callable} or {@link Runnable} depending on use case. 
- */
-public interface Task<T> extends Callable<T> {
-
-}
diff --git a/src/main/java/se/yolean/kafka/topic/client/retryable/TopicCreateOrVerify.java b/src/main/java/se/yolean/kafka/topic/client/retryable/TopicCreateOrVerify.java
new file mode 100644
index 0000000..58ea298
--- /dev/null
+++ b/src/main/java/se/yolean/kafka/topic/client/retryable/TopicCreateOrVerify.java
@@ -0,0 +1,7 @@
+package se.yolean.kafka.topic.client.retryable;
+
+import java.util.concurrent.Callable;
+
+public interface TopicCreateOrVerify extends Callable {
+
+}
diff --git a/src/main/java/se/yolean/kafka/topic/client/retryable/TopicOperationResult.java b/src/main/java/se/yolean/kafka/topic/client/retryable/TopicOperationResult.java
new file mode 100644
index 0000000..6a6d982
--- /dev/null
+++ b/src/main/java/se/yolean/kafka/topic/client/retryable/TopicOperationResult.java
@@ -0,0 +1,5 @@
+package se.yolean.kafka.topic.client.retryable;
+
+public interface TopicOperationResult {
+
+}
diff --git a/src/main/java/se/yolean/kafka/topic/manager/configure/TopicDeclarationsPollModule.java b/src/main/java/se/yolean/kafka/topic/manager/configure/TopicDeclarationsPollModule.java
new file mode 100644
index 0000000..4a4a92b
--- /dev/null
+++ b/src/main/java/se/yolean/kafka/topic/manager/configure/TopicDeclarationsPollModule.java
@@ -0,0 +1,24 @@
+package se.yolean.kafka.topic.manager.configure;
+
+import com.google.inject.AbstractModule;
+
+import se.yolean.kafka.topic.client.retryable.BrokerProbe;
+import se.yolean.kafka.topic.client.retryable.RestProxySetup;
+import se.yolean.kafka.topic.client.retryable.SchemaRegistrySetup;
+
+public class TopicDeclarationsPollModule extends AbstractModule {
+
+  public TopicDeclarationsPollModule(
+      BrokerProbe.KafkaStatus initResult,
+      SchemaRegistrySetup.AdminSchemaStatus adminSchemaStatus,
+      RestProxySetup.EndpointsStatus restEndpointsStatus
+      ) {
+  }
+
+  @Override
+  protected void configure() {
+    // TODO Auto-generated method stub
+
+  }
+
+}
diff --git a/src/main/java/se/yolean/kafka/topic/manager/tt/TopicsTopicWatcher.java b/src/main/java/se/yolean/kafka/topic/manager/tt/TopicsTopicWatcher.java
new file mode 100644
index 0000000..4f72e1f
--- /dev/null
+++ b/src/main/java/se/yolean/kafka/topic/manager/tt/TopicsTopicWatcher.java
@@ -0,0 +1,29 @@
+package se.yolean.kafka.topic.manager.tt;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+import com.github.structlog4j.ILogger;
+import com.github.structlog4j.SLoggerFactory;
+
+public class TopicsTopicWatcher implements Runnable {
+
+  private final ILogger log = SLoggerFactory.getLogger(this.getClass());
+
+  @Inject
+  @Named("topic.declarations.consumer.polls.max")
+  private int pollsMax;
+
+  @Override
+  public void run() {
+    for (int i = 0; pollsMax == -1 || i < pollsMax; i++) {
+      log.debug("Here we'll be repeating the topic management loop");
+      try {
+        Thread.sleep(1000);
+      } catch (InterruptedException e) {
+        throw new RuntimeException("ouch", e);
+      }
+    }
+  }
+
+}
diff --git a/src/main/resources/default.properties b/src/main/resources/default.properties
index 6f6195b..bdcaae3 100644
--- a/src/main/resources/default.properties
+++ b/src/main/resources/default.properties
@@ -4,5 +4,8 @@
 prometheus.exporter.port=5000
 brokers.describe.available.min=1
 # AdminClient timeouts, see BrokerProbeIntegrationTest for details
-brokers.describe.timeout.ms=201
-brokers.describe.get.timeout.ms=202
+brokers.describe.timeout.ms=25001
+brokers.describe.get.timeout.ms=25002
+
+# For testing. Default is to keep polling forever
+topic.declarations.consumer.polls.max=-1
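
The ConcurrencyModule comment above leaves "simple retries" as an open idea after dropping the asyncretry binding. A minimal sketch of what that could look like, assuming a plain blocking helper around the same Callable-style tasks; the class name SimpleRetry is illustrative and not part of this patch series, and the backoff values only roughly mirror the deleted ExecutorRetryProviderForInit (500 ms doubled per attempt, capped at 10 s, at most 20 retries; jitter omitted):

package se.yolean.kafka.topic.client.config;

import java.util.concurrent.Callable;

// Illustrative sketch only, not part of this patch series: a blocking retry helper
// for Callable-based tasks such as BrokerProbe, SchemaRegistrySetup and RestProxySetup.
public class SimpleRetry {

  // Backoff roughly mirrors the removed ExecutorRetryProviderForInit:
  // 500 ms initial delay, doubled per attempt, capped at 10 s, at most 20 retries.
  public static <T> T callWithRetry(Callable<T> task) throws Exception {
    long delayMs = 500;
    Exception last = null;
    for (int attempt = 0; attempt <= 20; attempt++) {
      try {
        return task.call();
      } catch (InterruptedException e) {
        throw e; // don't retry on interruption
      } catch (Exception e) {
        last = e;
        if (attempt == 20) {
          break; // retries exhausted
        }
        Thread.sleep(delayMs);
        delayMs = Math.min(delayMs * 2, 10_000);
      }
    }
    throw last;
  }
}

With something like this, run() could keep its sequential shape, e.g. BrokerProbe.KafkaStatus status = SimpleRetry.callWithRetry(brokerProbe); instead of rethrowing every failure as RuntimeException("unhandled", e).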
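
TopicsTopicWatcher reads topic.declarations.consumer.polls.max through @Named field injection, but this patch does not show where the property gets bound. The module below is a guess for illustration only, assuming the conventional Names.bindProperties wiring of default.properties; the name PropertiesModule and the classpath loading are assumptions, not something this series adds:

package se.yolean.kafka.topic.client.config;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import com.google.inject.AbstractModule;
import com.google.inject.name.Names;

// Illustrative sketch only: binds every key in default.properties as a @Named value,
// which would let Guice inject topic.declarations.consumer.polls.max into the
// pollsMax field of TopicsTopicWatcher (String-to-int conversion is handled by Guice).
public class PropertiesModule extends AbstractModule {

  @Override
  protected void configure() {
    Properties props = new Properties();
    try (InputStream in = getClass().getResourceAsStream("/default.properties")) {
      if (in == null) {
        throw new RuntimeException("default.properties not found on classpath");
      }
      props.load(in);
    } catch (IOException e) {
      throw new RuntimeException("Failed to read default.properties", e);
    }
    Names.bindProperties(binder(), props);
  }
}

Read this way, the default -1 keeps the watcher polling forever, while a build-contract test can override the property with a small positive value so the loop terminates.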