diff --git a/README.md b/README.md index 0504a9bd..91688f9d 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,13 @@ You can add streams-bootstrap via Maven Central. #### Gradle ```gradle -compile group: 'com.bakdata.kafka', name: 'streams-bootstrap', version: '2.1.1' +implementation group: 'com.bakdata.kafka', name: 'streams-bootstrap-cli', version: '3.0.0' +``` + +With Kotlin DSL + +```gradle +implementation(group = "com.bakdata.kafka", name = "streams-bootstrap-cli", version = "3.0.0") ``` #### Maven @@ -35,8 +41,8 @@ compile group: 'com.bakdata.kafka', name: 'streams-bootstrap', version: '2.1.1' com.bakdata.kafka - streams-bootstrap - 2.1.1 + streams-bootstrap-cli + 3.0.0 ``` @@ -52,8 +58,10 @@ and `getUniqueAppId()`. You can define the topology of your application in `buil ```java import com.bakdata.kafka.KafkaStreamsApplication; -import java.util.Properties; -import org.apache.kafka.streams.StreamsBuilder; +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; +import com.bakdata.kafka.TopologyBuilder; +import java.util.Map; import org.apache.kafka.streams.kstream.KStream; public class StreamsBootstrapApplication extends KafkaStreamsApplication { @@ -62,26 +70,30 @@ public class StreamsBootstrapApplication extends KafkaStreamsApplication { } @Override - public void buildTopology(final StreamsBuilder builder) { - final KStream input = - builder.stream(this.getInputTopics()); + public StreamsApp createApp(final boolean cleanUp) { + return new StreamsApp() { + @Override + public void buildTopology(final TopologyBuilder builder) { + final KStream input = builder.streamInput(); - // your topology + // your topology - input.to(this.getOutputTopic()); - } - - @Override - public String getUniqueAppId() { - return "streams-bootstrap-app"; - } + input.to(builder.getTopics().getOutputTopic()); + } - // Optionally you can override the default streams bootstrap Kafka properties - @Override - protected Properties createKafkaProperties() { - final Properties kafkaProperties = super.createKafkaProperties(); + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + return "streams-bootstrap-app-" + topics.getOutputTopic(); + } - return kafkaProperties; + // Optionally you can define custom Kafka properties + @Override + public Map createKafkaProperties() { + return Map.of( + // your config + ); + } + }; } } ``` @@ -92,6 +104,8 @@ The following configuration options are available: - `--schema-registry-url`: The URL of the Schema Registry +- `--kafka-config`: Kafka Streams configuration (`[,...]`) + - `--input-topics`: List of input topics (comma-separated) - `--input-pattern`: Pattern of input topics @@ -100,8 +114,6 @@ The following configuration options are available: - `--error-topic`: A topic to write errors to -- `--streams-config`: Kafka Streams configuration (`[,...]`) - - `--extra-input-topics`: Additional named input topics if you need to specify multiple topics with different message types (`[,...]`) @@ -113,13 +125,15 @@ The following configuration options are available: - `--volatile-group-instance-id`: Whether the group instance id is volatile, i.e., it will change on a Streams shutdown. 
-- `--clean-up`: Whether the state of the Kafka Streams app, i.e., offsets and state stores and auto-created topics, - should be cleared instead of running the app +- `--debug`: Configure logging to debug + +Additionally, the following commands are available: -- `--delete-output`: Whether the output topics with their associated schemas and the consumer group should be deleted - during the cleanup +- `clean`: Reset the Kafka Streams application. Additionally, delete the consumer group and all output and intermediate + topics associated with the Kafka Streams application. -- `--debug`: Configure logging to debug +- `reset`: Clear all state stores, consumer group offsets, and internal topics associated with the Kafka Streams + application. #### Kafka producer @@ -127,8 +141,11 @@ Create a subclass of `KafkaProducerApplication`. ```java import com.bakdata.kafka.KafkaProducerApplication; -import java.util.Properties; -import org.apache.kafka.clients.producer.KafkaProducer; +import com.bakdata.kafka.ProducerApp; +import com.bakdata.kafka.ProducerBuilder; +import com.bakdata.kafka.ProducerRunnable; +import java.util.Map; +import org.apache.kafka.clients.producer.Producer; public class StreamsBootstrapApplication extends KafkaProducerApplication { public static void main(final String[] args) { @@ -136,18 +153,25 @@ public class StreamsBootstrapApplication extends KafkaProducerApplication { } @Override - protected void runApplication() { - try (final KafkaProducer producer = this.createProducer()) { - // your producer + public ProducerApp createApp(final boolean cleanUp) { + return new ProducerApp() { + @Override + public ProducerRunnable buildRunnable(final ProducerBuilder builder) { + return () -> { + try (final Producer producer = builder.createProducer()) { + // your producer + } + }; } - } - - // Optionally you can override the default streams bootstrap Kafka properties - @Override - protected Properties createKafkaProperties() { - final Properties kafkaProperties = super.createKafkaProperties(); - return kafkaProperties; + // Optionally you can define custom Kafka properties + @Override + public Map createKafkaProperties() { + return Map.of( + // your config + ); + } + }; } } ``` @@ -158,17 +182,18 @@ The following configuration options are available: - `--schema-registry-url`: The URL of the Schema Registry -- `--output-topic`: The output topic +- `--kafka-config`: Kafka producer configuration (`[,...]`) -- `--streams-config`: Kafka producer configuration (`[,...]`) +- `--output-topic`: The output topic - `--extra-output-topics`: Additional named output topics (`String=String>[,...]`) -- `--clean-up`: Whether the output topics and associated schemas of the producer app should be deleted instead of - running the app - - `--debug`: Configure logging to debug +Additionally, the following commands are available: + +- `clean`: Delete all output topics associated with the Kafka Producer application. 
+ ### Helm Charts For the configuration and deployment to Kubernetes, you can use diff --git a/build.gradle.kts b/build.gradle.kts index 693286fe..5cfa071e 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -1,7 +1,7 @@ plugins { id("com.bakdata.release") version "1.4.0" id("com.bakdata.sonar") version "1.4.0" - id("com.bakdata.sonatype") version "1.4.0" + id("com.bakdata.sonatype") version "1.4.1" id("io.freefair.lombok") version "8.4" } @@ -16,6 +16,7 @@ allprojects { repositories { mavenCentral() maven(url = "https://packages.confluent.io/maven/") + maven(url = "https://s01.oss.sonatype.org/content/repositories/snapshots") } } diff --git a/charts/producer-app-cleanup-job/templates/job.yaml b/charts/producer-app-cleanup-job/templates/job.yaml index 94d0e5f3..65f11c2d 100644 --- a/charts/producer-app-cleanup-job/templates/job.yaml +++ b/charts/producer-app-cleanup-job/templates/job.yaml @@ -55,6 +55,8 @@ spec: imagePullPolicy: "{{ .Values.imagePullPolicy }}" resources: {{ toYaml .Values.resources | indent 12 }} + args: + - clean env: - name: ENV_PREFIX value: {{ .Values.configurationEnvPrefix }}_ @@ -74,8 +76,6 @@ spec: - name: "{{ .Values.configurationEnvPrefix }}_DEBUG" value: {{ .Values.debug | quote }} {{- end }} - - name: "{{ .Values.configurationEnvPrefix }}_CLEAN_UP" - value: "true" {{- if hasKey .Values.streams "outputTopic" }} - name: "{{ .Values.configurationEnvPrefix }}_OUTPUT_TOPIC" value: {{ .Values.streams.outputTopic | quote }} diff --git a/charts/producer-app/README.md b/charts/producer-app/README.md index daf5a657..2366b8db 100644 --- a/charts/producer-app/README.md +++ b/charts/producer-app/README.md @@ -47,7 +47,7 @@ Alternatively, a YAML file that specifies the values for the parameters can be p ### Streams | Parameter | Description | Default | -| --------------------------- | ---------------------------------------------------------------------------------------------------------- | ------- | +|-----------------------------|------------------------------------------------------------------------------------------------------------|---------| | `streams.brokers` | Comma separated list of Kafka brokers to connect to. | | | `streams.schemaRegistryUrl` | URL of Schema Registry to connect to. | `null` | | `streams.config` | Configurations for your [Kafka producer app](https://kafka.apache.org/documentation/#producerconfigs). 
| `{}` | diff --git a/charts/streams-app-cleanup-job/templates/job.yaml b/charts/streams-app-cleanup-job/templates/job.yaml index e2e315c4..7b19f207 100644 --- a/charts/streams-app-cleanup-job/templates/job.yaml +++ b/charts/streams-app-cleanup-job/templates/job.yaml @@ -55,6 +55,12 @@ spec: imagePullPolicy: "{{ .Values.imagePullPolicy }}" resources: {{ toYaml .Values.resources | indent 12 }} + args: + {{- if .Values.streams.deleteOutput }} + - clean + {{- else }} + - reset + {{- end }} env: - name: ENV_PREFIX value: {{ .Values.configurationEnvPrefix }}_ @@ -70,20 +76,10 @@ spec: - name: "{{ .Values.configurationEnvPrefix }}_SCHEMA_REGISTRY_URL" value: {{ .Values.streams.schemaRegistryUrl | quote }} {{- end }} - {{- if hasKey .Values.streams "productive" }} - - name: "{{ .Values.configurationEnvPrefix }}_PRODUCTIVE" - value: {{ .Values.streams.productive | quote }} - {{- end }} {{- if hasKey .Values "debug" }} - name: "{{ .Values.configurationEnvPrefix }}_DEBUG" value: {{ .Values.debug | quote }} {{- end }} - - name: "{{ .Values.configurationEnvPrefix }}_CLEAN_UP" - value: "true" - {{- if hasKey .Values.streams "deleteOutput" }} - - name: "{{ .Values.configurationEnvPrefix }}_DELETE_OUTPUT" - value: {{ .Values.streams.deleteOutput | quote }} - {{- end }} {{- if and (hasKey .Values.streams "inputTopics") (.Values.streams.inputTopics) }} - name: "{{ .Values.configurationEnvPrefix }}_INPUT_TOPICS" value: {{ .Values.streams.inputTopics | join "," | quote }} diff --git a/charts/streams-app-cleanup-job/values.yaml b/charts/streams-app-cleanup-job/values.yaml index 19426361..b3464e2a 100644 --- a/charts/streams-app-cleanup-job/values.yaml +++ b/charts/streams-app-cleanup-job/values.yaml @@ -36,7 +36,6 @@ streams: extraOutputTopics: {} # role: output # errorTopic: error -# productive: true deleteOutput: false commandLine: {} diff --git a/charts/streams-app/README.md b/charts/streams-app/README.md index 4698348c..77fa5803 100644 --- a/charts/streams-app/README.md +++ b/charts/streams-app/README.md @@ -50,21 +50,20 @@ Alternatively, a YAML file that specifies the values for the parameters can be p ### Streams -| Parameter | Description | Default | -| ------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | -| `streams.brokers` | Comma separated list of Kafka brokers to connect to. | | -| `streams.schemaRegistryUrl` | URL of Schema Registry to connect to. | `null` | -| `streams.staticMembership` | Whether to use [Kafka Static Group Membership](https://cwiki.apache.org/confluence/display/KAFKA/KIP-345%3A+Introduce+static+membership+protocol+to+reduce+consumer+rebalances). | `false` | -| `streams.optimizeLeaveGroupBehavior` | Enabling this optimizes the leave group behavior when a pod is terminated. Depends on the deployment kind, i.e., `statefulSet`. Requires the app to use streams-bootstrap 2.7+. | `true` | -| `streams.config` | Configurations for your [Kafka Streams app](https://kafka.apache.org/documentation/#streamsconfigs). | `{}` | -| `streams.inputTopics` | List of input topics for your streams application. | `[]` | -| `streams.extraInputTopics` | Map of additional named input topics if you need to specify multiple topics with different message types. | `{}` | -| `streams.inputPattern` | Input pattern of topics for your streams application. 
| | -| `streams.extraInputPatterns` | Map of additional named input patterns if you need to specify multiple topics with different message types. | `{}` | -| `streams.outputTopic` | Output topic for your streams application. | | -| `streams.extraOutputTopics` | Map of additional named output topics if you need to specify multiple topics with different message types. | `{}` | -| `streams.errorTopic` | Error topic for your streams application. | | -| `streams.productive` | Whether to use Kafka configuration values that are more suitable for production environments. | `true` | +| Parameter | Description | Default | +|------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| `streams.brokers` | Comma separated list of Kafka brokers to connect to. | | +| `streams.schemaRegistryUrl` | URL of Schema Registry to connect to. | `null` | +| `streams.staticMembership` | Whether to use [Kafka Static Group Membership](https://cwiki.apache.org/confluence/display/KAFKA/KIP-345%3A+Introduce+static+membership+protocol+to+reduce+consumer+rebalances). | `false` | +| `streams.config` | Configurations for your [Kafka Streams app](https://kafka.apache.org/documentation/#streamsconfigs). | `{}` | +| `streams.inputTopics` | List of input topics for your streams application. | `[]` | +| `streams.extraInputTopics` | Map of additional named input topics if you need to specify multiple topics with different message types. | `{}` | +| `streams.inputPattern` | Input pattern of topics for your streams application. | | +| `streams.extraInputPatterns` | Map of additional named input patterns if you need to specify multiple topics with different message types. | `{}` | +| `streams.outputTopic` | Output topic for your streams application. | | +| `streams.extraOutputTopics` | Map of additional named output topics if you need to specify multiple topics with different message types. | `{}` | +| `streams.errorTopic` | Error topic for your streams application. | | +| `streams.productive` | Whether to use Kafka configuration values that are more suitable for production environments. 
| `true` | ### Other diff --git a/charts/streams-app/templates/deployment.yaml b/charts/streams-app/templates/deployment.yaml index 5b98e856..c13b080c 100644 --- a/charts/streams-app/templates/deployment.yaml +++ b/charts/streams-app/templates/deployment.yaml @@ -111,7 +111,7 @@ spec: - name: KAFKA_JMX_PORT value: "{{ .Values.jmx.port }}" {{- end }} - {{- if and (.Values.streams.optimizeLeaveGroupBehavior) (not .Values.statefulSet) }} + {{- if not .Values.statefulSet }} - name: "{{ .Values.configurationEnvPrefix }}_VOLATILE_GROUP_INSTANCE_ID" value: "true" {{- end }} @@ -123,10 +123,6 @@ spec: - name: "{{ .Values.configurationEnvPrefix }}_SCHEMA_REGISTRY_URL" value: {{ .Values.streams.schemaRegistryUrl | quote }} {{- end }} - {{- if hasKey .Values.streams "productive" }} - - name: "{{ .Values.configurationEnvPrefix }}_PRODUCTIVE" - value: {{ .Values.streams.productive | quote }} - {{- end }} {{- if hasKey .Values "debug" }} - name: "{{ .Values.configurationEnvPrefix }}_DEBUG" value: {{ .Values.debug | quote }} diff --git a/charts/streams-app/values.yaml b/charts/streams-app/values.yaml index 43749141..b3627e9d 100644 --- a/charts/streams-app/values.yaml +++ b/charts/streams-app/values.yaml @@ -27,7 +27,6 @@ streams: # brokers: "test:9092" # schemaRegistryUrl: "url:1234" staticMembership: false - optimizeLeaveGroupBehavior: true config: {} # max.poll.records: 500 # Note that YAML may convert large integers to scientific notation. Use Strings to avoid this. @@ -46,7 +45,6 @@ streams: extraOutputTopics: {} # role: output # errorTopic: error - # productive: true commandLine: {} # MY_CLI_PARAM: "foo-bar" diff --git a/gradle.properties b/gradle.properties index 9a9e33a9..2293871c 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,7 +1,13 @@ -version=2.23.1-SNAPSHOT +version=3.0.0-SNAPSHOT org.gradle.caching=true -org.gradle.parallel=true +# running Kafka JUnit in parallel causes problems +org.gradle.parallel=false kafkaVersion=3.6.1 +kafkaJunitVersion=3.6.0 confluentVersion=7.6.0 -fluentKafkaVersion=2.13.1 -org.gradle.jvmargs=-Xmx2048m +fluentKafkaVersion=2.14.0 +junitVersion=5.10.2 +mockitoVersion=5.11.0 +assertJVersion=3.25.3 +log4jVersion=2.23.1 +org.gradle.jvmargs=-Xmx4096m diff --git a/settings.gradle b/settings.gradle index c7e6e4b6..e9057df9 100644 --- a/settings.gradle +++ b/settings.gradle @@ -7,7 +7,8 @@ pluginManagement { rootProject.name = 'streams-bootstrap' include( - ":streams-bootstrap", + ":streams-bootstrap-core", ":streams-bootstrap-test", ":streams-bootstrap-large-messages", + ":streams-bootstrap-cli", ) diff --git a/streams-bootstrap-cli/build.gradle.kts b/streams-bootstrap-cli/build.gradle.kts new file mode 100644 index 00000000..da3fc86c --- /dev/null +++ b/streams-bootstrap-cli/build.gradle.kts @@ -0,0 +1,34 @@ +description = "Base classes to create standalone Java applications using picocli" + +plugins { + id("com.github.davidmc24.gradle.plugin.avro") version "1.9.1" +} + +dependencies { + api(project(":streams-bootstrap-core")) + api(group = "info.picocli", name = "picocli", version = "4.7.5") + val log4jVersion: String by project + implementation(group = "org.apache.logging.log4j", name = "log4j-core", version = log4jVersion) + implementation(group = "org.apache.logging.log4j", name = "log4j-slf4j2-impl", version = log4jVersion) + + val junitVersion: String by project + testRuntimeOnly(group = "org.junit.jupiter", name = "junit-jupiter-engine", version = junitVersion) + testImplementation(group = "org.junit.jupiter", name = "junit-jupiter-api", version = 
junitVersion) + testImplementation(group = "org.junit.jupiter", name = "junit-jupiter-params", version = junitVersion) + val assertJVersion: String by project + testImplementation(group = "org.assertj", name = "assertj-core", version = assertJVersion) + val mockitoVersion: String by project + testImplementation(group = "org.mockito", name = "mockito-core", version = mockitoVersion) + testImplementation(group = "org.mockito", name = "mockito-junit-jupiter", version = mockitoVersion) + val kafkaJunitVersion: String by project + testImplementation(group = "net.mguenther.kafka", name = "kafka-junit", version = kafkaJunitVersion) { + exclude(group = "org.slf4j", module = "slf4j-log4j12") + } + testImplementation(group = "com.ginsberg", name = "junit5-system-exit", version = "1.1.2") + val fluentKafkaVersion: String by project + testImplementation( + group = "com.bakdata.fluent-kafka-streams-tests", + name = "schema-registry-mock-junit5", + version = fluentKafkaVersion + ) +} diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/EnvironmentArgumentsParser.java b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/EnvironmentArgumentsParser.java similarity index 98% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/EnvironmentArgumentsParser.java rename to streams-bootstrap-cli/src/main/java/com/bakdata/kafka/EnvironmentArgumentsParser.java index f105a308..d93d7dc8 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/EnvironmentArgumentsParser.java +++ b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/EnvironmentArgumentsParser.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -36,7 +36,7 @@ /** *
<p>This class is primarily used to inject environment variables to the passed in command line arguments
- * in {@link KafkaStreamsApplication}.</p>
+ * in {@link KafkaPropertiesFactory}.</p>
 *
 * <p>In general a usage would look like this:</p>
 * <pre>{@code
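
The `{@code}` example of this javadoc lies outside the diff context above. As a rough illustration of the documented behaviour, the sketch below (not part of the patch; the environment variable names, the `APP_` prefix value, and the expected output are assumptions) shows how prefixed environment variables are turned into CLI-style arguments, mirroring the `new EnvironmentArgumentsParser(ENV_PREFIX).parseVariables(System.getenv())` call added in `KafkaApplication` further down in this diff.

```java
package com.bakdata.kafka; // same package as the parser, purely for illustration

import java.util.List;
import java.util.Map;

public class EnvironmentArgumentsParserSketch {
    public static void main(final String[] args) {
        // Hypothetical environment, e.g. populated by the Helm charts via configurationEnvPrefix
        final Map<String, String> environment = Map.of(
                "APP_INPUT_TOPICS", "input1,input2",
                "APP_OUTPUT_TOPIC", "output");
        // Translate every variable carrying the "APP_" prefix into a CLI-style argument pair
        final List<String> cliArguments = new EnvironmentArgumentsParser("APP_")
                .parseVariables(environment);
        // Expected to resemble: [--input-topics, input1,input2, --output-topic, output]
        System.out.println(cliArguments);
    }
}
```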
diff --git a/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/KafkaApplication.java b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/KafkaApplication.java
new file mode 100644
index 00000000..e0a1f84c
--- /dev/null
+++ b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/KafkaApplication.java
@@ -0,0 +1,382 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2024 bakdata
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package com.bakdata.kafka;
+
+import static java.util.Collections.emptyMap;
+
+import com.google.common.base.Preconditions;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentLinkedDeque;
+import java.util.function.Consumer;
+import lombok.AccessLevel;
+import lombok.Getter;
+import lombok.NonNull;
+import lombok.RequiredArgsConstructor;
+import lombok.Setter;
+import lombok.ToString;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.core.config.Configurator;
+import picocli.CommandLine;
+import picocli.CommandLine.Command;
+import picocli.CommandLine.ParseResult;
+
+/**
+ * <p>The base class for creating Kafka applications.</p>
+ * This class provides the following configuration options:
+ * <ul>
+ *     <li>{@link #brokers}</li>
+ *     <li>{@link #outputTopic}</li>
+ *     <li>{@link #extraOutputTopics}</li>
+ *     <li>{@link #brokers}</li>
+ *     <li>{@link #debug}</li>
+ *     <li>{@link #schemaRegistryUrl}</li>
+ *     <li>{@link #kafkaConfig}</li>
+ * </ul>
+ * To implement your Kafka application inherit from this class and add your custom options. Run it by calling
+ * {@link #startApplication(KafkaApplication, String[])} with an instance of your class from your main.
+ *
+ * @param <R> type of {@link Runner} used by this app
+ * @param <CR> type of {@link CleanUpRunner} used by this app
+ * @param <O> type of execution options to create runner
+ * @param <E> type of {@link ExecutableApp} used by this app
+ * @param <CA> type of {@link ConfiguredApp} used by this app
+ * @param <T> type of topic config used by this app
+ * @param <A> type of app
+ */
+@ToString
+@Getter
+@Setter
+@RequiredArgsConstructor
+@Slf4j
+@Command(mixinStandardHelpOptions = true)
+public abstract class KafkaApplication<R extends Runner, CR extends CleanUpRunner, O, E extends ExecutableApp<R, CR, O>, CA extends ConfiguredApp<E>, T, A>
+        implements Runnable, AutoCloseable {
+    private static final String ENV_PREFIX = Optional.ofNullable(System.getenv("ENV_PREFIX")).orElse("APP_");
+    @ToString.Exclude
+    @Getter(AccessLevel.NONE)
+    // ConcurrentLinkedDeque required because calling #stop() causes asynchronous #run() calls to finish and thus
+    // concurrently iterating and removing from #runners
+    private final ConcurrentLinkedDeque<Stoppable> activeApps = new ConcurrentLinkedDeque<>();
+    @CommandLine.Option(names = "--output-topic", description = "Output topic")
+    private String outputTopic;
+    @CommandLine.Option(names = "--extra-output-topics", split = ",", description = "Additional named output topics")
+    private Map<String, String> extraOutputTopics = emptyMap();
+    @CommandLine.Option(names = "--brokers", required = true, description = "Broker addresses to connect to")
+    private String brokers;
+    @CommandLine.Option(names = "--debug", arity = "0..1", description = "Configure logging to debug")
+    private boolean debug;
+    @CommandLine.Option(names = "--schema-registry-url", description = "URL of Schema Registry")
+    private String schemaRegistryUrl;
+    @CommandLine.Option(names = "--kafka-config", split = ",", description = "Additional Kafka properties")
+    private Map<String, String> kafkaConfig = emptyMap();
+
+    /**
+     * <p>This method needs to be called in the executable custom application class inheriting from
+     * {@code KafkaApplication}.</p>
+     * <p>This method calls System exit</p>
+ * + * @param app An instance of the custom application class. + * @param args Arguments passed in by the custom application class. + * @see #startApplicationWithoutExit(KafkaApplication, String[]) + */ + public static void startApplication(final KafkaApplication app, final String[] args) { + final int exitCode = startApplicationWithoutExit(app, args); + System.exit(exitCode); + } + + /** + *
<p>This method needs to be called in the executable custom application class inheriting from
+     * {@code KafkaApplication}.</p>
+ * + * @param app An instance of the custom application class. + * @param args Arguments passed in by the custom application class. + * @return Exit code of application + */ + public static int startApplicationWithoutExit(final KafkaApplication app, + final String[] args) { + final String[] populatedArgs = addEnvironmentVariablesArguments(args); + final CommandLine commandLine = new CommandLine(app) + .setExecutionStrategy(app::execute); + return commandLine.execute(populatedArgs); + } + + private static String[] addEnvironmentVariablesArguments(final String[] args) { + Preconditions.checkArgument(!ENV_PREFIX.equals(EnvironmentStreamsConfigParser.PREFIX), + "Prefix '" + EnvironmentStreamsConfigParser.PREFIX + "' is reserved for Streams config"); + final List environmentArguments = new EnvironmentArgumentsParser(ENV_PREFIX) + .parseVariables(System.getenv()); + final Collection allArgs = new ArrayList<>(environmentArguments); + allArgs.addAll(Arrays.asList(args)); + return allArgs.toArray(String[]::new); + } + + /** + * Create options for running the app + * @return run options if available + * @see ExecutableApp#createRunner(Object) + */ + public abstract Optional createExecutionOptions(); + + /** + * Topics used by app + * @return topic configuration + */ + public abstract T createTopicConfig(); + + /** + * Create a new app that will be configured and executed according to this application. + * + * @param cleanUp whether app is created for clean up purposes. In that case, the user might want + * to skip initialization of expensive resources. + * @return app + */ + public abstract A createApp(boolean cleanUp); + + /** + * Create a new app that will be configured and executed according to this application. + * + * @return app + */ + public A createApp() { + return this.createApp(false); + } + + /** + * Clean all resources associated with this application + */ + public void clean() { + try (final CleanableApp cleanableApp = this.createCleanableApp()) { + final CR cleanUpRunner = cleanableApp.getCleanUpRunner(); + cleanUpRunner.clean(); + } + } + + /** + * @see #stop() + */ + @Override + public void close() { + this.stop(); + } + + /** + * Stop all applications that have been started asynchronously, e.g., by using {@link #run()} or {@link #clean()}. + */ + public final void stop() { + this.activeApps.forEach(Stoppable::stop); + } + + /** + * Run the application. + */ + @Override + public void run() { + try (final RunnableApp runnableApp = this.createRunnableApp()) { + final R runner = runnableApp.getRunner(); + runner.run(); + } + } + + public KafkaEndpointConfig getEndpointConfig() { + return KafkaEndpointConfig.builder() + .brokers(this.brokers) + .schemaRegistryUrl(this.schemaRegistryUrl) + .build(); + } + + /** + * Create a new {@code ExecutableApp} that will be executed according to the requested command. + * @return {@code ExecutableApp} + */ + public final E createExecutableApp() { + return this.createExecutableApp(false); + } + + /** + * Create a new {@code ExecutableApp} that will be executed according to the requested command. + * + * @param cleanUp whether app is created for clean up purposes. In that case, the user might want to skip + * initialization of expensive resources. 
+ * @return {@code ExecutableApp} + */ + public final E createExecutableApp(final boolean cleanUp) { + final ConfiguredApp configuredStreamsApp = this.createConfiguredApp(cleanUp); + final KafkaEndpointConfig endpointConfig = this.getEndpointConfig(); + return configuredStreamsApp.withEndpoint(endpointConfig); + } + + /** + * Create a new {@code ConfiguredApp} that will be executed according to this application. + * @return {@code ConfiguredApp} + */ + public final CA createConfiguredApp() { + return this.createConfiguredApp(false); + } + + /** + * Create a new {@code ConfiguredApp} that will be executed according to this application. + * + * @param cleanUp whether {@code ConfiguredApp} is created for clean up purposes. In that case, the user might want + * to skip initialization of expensive resources. + * @return {@code ConfiguredApp} + */ + public final CA createConfiguredApp(final boolean cleanUp) { + final AppConfiguration configuration = this.createConfiguration(); + final A app = this.createApp(cleanUp); + return this.createConfiguredApp(app, configuration); + } + + /** + * Create configuration to configure app + * @return configuration + */ + public final AppConfiguration createConfiguration() { + final T topics = this.createTopicConfig(); + return new AppConfiguration<>(topics, this.kafkaConfig); + } + + /** + * Create a new {@code RunnableApp} + * @return {@code RunnableApp} + */ + public final RunnableApp createRunnableApp() { + final ExecutableApp app = this.createExecutableApp(false); + final Optional executionOptions = this.createExecutionOptions(); + final R runner = executionOptions.map(app::createRunner).orElseGet(app::createRunner); + final RunnableApp runnableApp = new RunnableApp<>(app, runner, this.activeApps::remove); + this.activeApps.add(runnableApp); + return runnableApp; + } + + /** + * Create a new {@code CleanableApp} + * @return {@code CleanableApp} + */ + public final CleanableApp createCleanableApp() { + final ExecutableApp executableApp = this.createExecutableApp(true); + final CR cleanUpRunner = executableApp.createCleanUpRunner(); + final CleanableApp cleanableApp = new CleanableApp<>(executableApp, cleanUpRunner, this.activeApps::remove); + this.activeApps.add(cleanableApp); + return cleanableApp; + } + + /** + * Create a new {@code ConfiguredApp} that will be executed according to the given config. + * + * @param app app to configure. + * @param configuration configuration for app + * @return {@code ConfiguredApp} + */ + protected abstract CA createConfiguredApp(final A app, AppConfiguration configuration); + + /** + * Configure application when running in debug mode. By default, Log4j2 log level is configured to debug for + * {@code com.bakdata} and the applications package. 
+ */ + protected void configureDebug() { + Configurator.setLevel("com.bakdata", Level.DEBUG); + Configurator.setLevel(this.getClass().getPackageName(), Level.DEBUG); + } + + private void startApplication() { + Runtime.getRuntime().addShutdownHook(new Thread(this::close)); + log.info("Starting application"); + if (this.debug) { + this.configureDebug(); + } + log.debug("Starting application: {}", this); + } + + private int execute(final ParseResult parseResult) { + this.startApplication(); + final int exitCode = new CommandLine.RunLast().execute(parseResult); + this.close(); + return exitCode; + } + + @FunctionalInterface + private interface Stoppable { + void stop(); + } + + /** + * Provides access to a {@link CleanUpRunner} and closes the associated {@link ExecutableApp} + */ + @RequiredArgsConstructor(access = AccessLevel.PROTECTED) + public static class CleanableApp implements AutoCloseable, Stoppable { + private final @NonNull ExecutableApp app; + @Getter + private final @NonNull CR cleanUpRunner; + private final @NonNull Consumer onClose; + + @Override + public void close() { + this.stop(); + this.onClose.accept(this); + } + + /** + * Close the app + */ + @Override + public void stop() { + this.app.close(); + } + } + + /** + * Provides access to a {@link Runner} and closes the associated {@link ExecutableApp} + */ + @RequiredArgsConstructor(access = AccessLevel.PROTECTED) + public static final class RunnableApp implements AutoCloseable, Stoppable { + private final @NonNull ExecutableApp app; + @Getter + private final @NonNull R runner; + private final @NonNull Consumer onClose; + + @Override + public void close() { + this.stop(); + this.onClose.accept(this); + } + + /** + * Close the runner and app + */ + @Override + public void stop() { + this.runner.close(); + // close app after runner because messages currently processed might depend on resources + this.app.close(); + } + } +} diff --git a/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/KafkaProducerApplication.java b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/KafkaProducerApplication.java new file mode 100644 index 00000000..e38f3408 --- /dev/null +++ b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/KafkaProducerApplication.java @@ -0,0 +1,80 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import java.util.Optional; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.Setter; +import lombok.ToString; +import lombok.extern.slf4j.Slf4j; +import picocli.CommandLine.Command; + + +/** + *
<p>The base class for creating Kafka Producer applications.</p>
+ * This class provides all configuration options provided by {@link KafkaApplication}. + * To implement your Kafka Producer application inherit from this class and add your custom options. Run it by + * calling {@link #startApplication(KafkaApplication, String[])} with a instance of your class from your main. + */ +@ToString(callSuper = true) +@Getter +@Setter +@RequiredArgsConstructor +@Slf4j +@Command(description = "Run a Kafka Producer application") +public abstract class KafkaProducerApplication extends + KafkaApplication, ConfiguredProducerApp, ProducerTopicConfig, + ProducerApp> { + + /** + * Delete all output topics associated with the Kafka Producer application. + */ + @Command(description = "Delete all output topics associated with the Kafka Producer application.") + @Override + public void clean() { + super.clean(); + } + + @Override + public final Optional createExecutionOptions() { + return Optional.empty(); + } + + @Override + public final ProducerTopicConfig createTopicConfig() { + return ProducerTopicConfig.builder() + .outputTopic(this.getOutputTopic()) + .extraOutputTopics(this.getExtraOutputTopics()) + .build(); + } + + @Override + public final ConfiguredProducerApp createConfiguredApp(final ProducerApp app, + final AppConfiguration configuration) { + return new ConfiguredProducerApp<>(app, configuration); + } +} diff --git a/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/KafkaStreamsApplication.java b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/KafkaStreamsApplication.java new file mode 100644 index 00000000..a6e62f21 --- /dev/null +++ b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/KafkaStreamsApplication.java @@ -0,0 +1,168 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.regex.Pattern; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.Setter; +import lombok.ToString; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.KafkaStreams.StateListener; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; +import picocli.CommandLine; +import picocli.CommandLine.Command; +import picocli.CommandLine.UseDefaultConverter; + + +/** + *
<p>The base class for creating Kafka Streams applications.</p>
+ * This class provides the following configuration options in addition to those provided by {@link KafkaApplication}:
+ * <ul>
+ *     <li>{@link #inputTopics}</li>
+ *     <li>{@link #inputPattern}</li>
+ *     <li>{@link #errorTopic}</li>
+ *     <li>{@link #extraInputTopics}</li>
+ *     <li>{@link #extraInputPatterns}</li>
+ *     <li>{@link #volatileGroupInstanceId}</li>
+ * </ul>
+ * To implement your Kafka Streams application inherit from this class and add your custom options. Run it by calling + * {@link #startApplication(KafkaApplication, String[])} with a instance of your class from your main. + */ +@ToString(callSuper = true) +@Getter +@Setter +@RequiredArgsConstructor +@Slf4j +@Command(description = "Run a Kafka Streams application.") +public abstract class KafkaStreamsApplication extends + KafkaApplication, ConfiguredStreamsApp, StreamsTopicConfig, StreamsApp> { + @CommandLine.Option(names = "--input-topics", description = "Input topics", split = ",") + private List inputTopics = emptyList(); + @CommandLine.Option(names = "--input-pattern", description = "Input pattern") + private Pattern inputPattern; + @CommandLine.Option(names = "--error-topic", description = "Error topic") + private String errorTopic; + @CommandLine.Option(names = "--extra-input-topics", split = ",", description = "Additional named input topics", + converter = {UseDefaultConverter.class, StringListConverter.class}) + private Map> extraInputTopics = emptyMap(); + @CommandLine.Option(names = "--extra-input-patterns", split = ",", description = "Additional named input patterns") + private Map extraInputPatterns = emptyMap(); + @CommandLine.Option(names = "--volatile-group-instance-id", arity = "0..1", + description = "Whether the group instance id is volatile, i.e., it will change on a Streams shutdown.") + private boolean volatileGroupInstanceId; + + /** + * Reset the Kafka Streams application. Additionally, delete the consumer group and all output and intermediate + * topics associated with the Kafka Streams application. + */ + @Command(description = "Reset the Kafka Streams application. Additionally, delete the consumer group and all " + + "output and intermediate topics associated with the Kafka Streams application.") + @Override + public void clean() { + super.clean(); + } + + /** + * Clear all state stores, consumer group offsets, and internal topics associated with the Kafka Streams + * application. + */ + @Command(description = "Clear all state stores, consumer group offsets, and internal topics associated with the " + + "Kafka Streams application.") + public void reset() { + try (final CleanableApp app = this.createCleanableApp()) { + final StreamsCleanUpRunner runner = app.getCleanUpRunner(); + runner.reset(); + } + } + + @Override + public final Optional createExecutionOptions() { + final StreamsExecutionOptions options = StreamsExecutionOptions.builder() + .volatileGroupInstanceId(this.volatileGroupInstanceId) + .uncaughtExceptionHandler(this::createUncaughtExceptionHandler) + .stateListener(this::createStateListener) + .onStart(this::onStreamsStart) + .build(); + return Optional.of(options); + } + + @Override + public final StreamsTopicConfig createTopicConfig() { + return StreamsTopicConfig.builder() + .inputTopics(this.inputTopics) + .extraInputTopics(this.extraInputTopics) + .inputPattern(this.inputPattern) + .extraInputPatterns(this.extraInputPatterns) + .outputTopic(this.getOutputTopic()) + .extraOutputTopics(this.getExtraOutputTopics()) + .errorTopic(this.errorTopic) + .build(); + } + + @Override + public final ConfiguredStreamsApp createConfiguredApp(final StreamsApp app, + final AppConfiguration configuration) { + return new ConfiguredStreamsApp<>(app, configuration); + } + + /** + * Create a {@link StateListener} to use for Kafka Streams. + * + * @return {@code StateListener}. 
{@link NoOpStateListener} by default + * @see KafkaStreams#setStateListener(StateListener) + */ + protected StateListener createStateListener() { + return new NoOpStateListener(); + } + + /** + * Create a {@link StreamsUncaughtExceptionHandler} to use for Kafka Streams. + * + * @return {@code StreamsUncaughtExceptionHandler}. {@link DefaultStreamsUncaughtExceptionHandler} by default + * @see KafkaStreams#setUncaughtExceptionHandler(StreamsUncaughtExceptionHandler) + */ + protected StreamsUncaughtExceptionHandler createUncaughtExceptionHandler() { + return new DefaultStreamsUncaughtExceptionHandler(); + } + + /** + * Called after starting Kafka Streams + * @param runningStreams running {@link KafkaStreams} instance along with its {@link StreamsConfig} and + * {@link org.apache.kafka.streams.Topology} + */ + protected void onStreamsStart(final RunningStreams runningStreams) { + // do nothing by default + } +} diff --git a/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/SimpleKafkaProducerApplication.java b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/SimpleKafkaProducerApplication.java new file mode 100644 index 00000000..e14e764a --- /dev/null +++ b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/SimpleKafkaProducerApplication.java @@ -0,0 +1,51 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import java.util.function.Function; +import java.util.function.Supplier; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; + +/** + * {@code KafkaProducerApplication} without any additional configuration options. 
+ */ +@RequiredArgsConstructor +public final class SimpleKafkaProducerApplication extends KafkaProducerApplication { + private final @NonNull Function appFactory; + + /** + * Create new {@code SimpleKafkaProducerApplication} + * @param appFactory factory to create {@code ProducerApp} without any parameters + */ + public SimpleKafkaProducerApplication(final Supplier appFactory) { + this(cleanUp -> appFactory.get()); + } + + @Override + public ProducerApp createApp(final boolean cleanUp) { + return this.appFactory.apply(cleanUp); + } +} diff --git a/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/SimpleKafkaStreamsApplication.java b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/SimpleKafkaStreamsApplication.java new file mode 100644 index 00000000..1279cdd8 --- /dev/null +++ b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/SimpleKafkaStreamsApplication.java @@ -0,0 +1,52 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import java.util.function.Function; +import java.util.function.Supplier; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; + +/** + * {@code KafkaStreamsApplication} without any additional configuration options. 
+ */ +@RequiredArgsConstructor +public final class SimpleKafkaStreamsApplication extends KafkaStreamsApplication { + + private final @NonNull Function appFactory; + + /** + * Create new {@code SimpleKafkaStreamsApplication} + * @param appFactory factory to create {@code StreamsApp} without any parameters + */ + public SimpleKafkaStreamsApplication(final Supplier appFactory) { + this(cleanUp -> appFactory.get()); + } + + @Override + public StreamsApp createApp(final boolean cleanUp) { + return this.appFactory.apply(cleanUp); + } +} diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/StringListConverter.java b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/StringListConverter.java similarity index 89% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/StringListConverter.java rename to streams-bootstrap-cli/src/main/java/com/bakdata/kafka/StringListConverter.java index 9dceec13..a31657bc 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/StringListConverter.java +++ b/streams-bootstrap-cli/src/main/java/com/bakdata/kafka/StringListConverter.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -32,10 +32,10 @@ * Converter for lists inside collection type parsed by PicoCLI. List members need to be separated by {@code ;} */ public class StringListConverter implements ITypeConverter> { - private static final Splitter TOPIC_SPLITTER = Splitter.on(";").omitEmptyStrings().trimResults(); + private static final Splitter SPLITTER = Splitter.on(";").omitEmptyStrings().trimResults(); @Override public List convert(final String value) { - return TOPIC_SPLITTER.splitToList(value); + return SPLITTER.splitToList(value); } } diff --git a/streams-bootstrap/src/test/avro/TestRecord.avsc b/streams-bootstrap-cli/src/test/avro/TestRecord.avsc similarity index 100% rename from streams-bootstrap/src/test/avro/TestRecord.avsc rename to streams-bootstrap-cli/src/test/avro/TestRecord.avsc diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/CliTest.java b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/CliTest.java similarity index 52% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/CliTest.java rename to streams-bootstrap-cli/src/test/java/com/bakdata/kafka/CliTest.java index 4593062e..cdede819 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/CliTest.java +++ b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/CliTest.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,8 +24,7 @@ package com.bakdata.kafka; -import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith; -import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig; +import static com.bakdata.kafka.TestUtil.newKafkaCluster; import static net.mguenther.kafka.junit.Wait.delay; import static org.assertj.core.api.Assertions.assertThat; @@ -39,7 +38,6 @@ import net.mguenther.kafka.junit.SendKeyValues; import net.mguenther.kafka.junit.TopicConfig; import org.apache.kafka.common.serialization.Serdes; -import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.kstream.Consumed; import 
org.junit.jupiter.api.Test; @@ -54,13 +52,18 @@ private static void runApp(final KafkaStreamsApplication app, final String... ar void shouldExitWithSuccessCode() { KafkaApplication.startApplication(new KafkaStreamsApplication() { @Override - public void buildTopology(final StreamsBuilder builder) { - throw new UnsupportedOperationException(); - } + public StreamsApp createApp(final boolean cleanUp) { + return new StreamsApp() { + @Override + public void buildTopology(final TopologyBuilder builder) { + throw new UnsupportedOperationException(); + } - @Override - public String getUniqueAppId() { - throw new UnsupportedOperationException(); + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + throw new UnsupportedOperationException(); + } + }; } @Override @@ -78,22 +81,17 @@ public void run() { @Test @ExpectSystemExitWithStatus(1) void shouldExitWithErrorCodeOnRunError() { - KafkaApplication.startApplication(new KafkaStreamsApplication() { + KafkaApplication.startApplication(new SimpleKafkaStreamsApplication(() -> new StreamsApp() { @Override - public void buildTopology(final StreamsBuilder builder) { + public void buildTopology(final TopologyBuilder builder) { throw new UnsupportedOperationException(); } @Override - public String getUniqueAppId() { + public String getUniqueAppId(final StreamsTopicConfig topics) { throw new UnsupportedOperationException(); } - - @Override - public void run() { - throw new RuntimeException(); - } - }, new String[]{ + }), new String[]{ "--brokers", "localhost:9092", "--schema-registry-url", "http://localhost:8081", "--input-topics", "input", @@ -106,17 +104,22 @@ public void run() { void shouldExitWithErrorCodeOnCleanupError() { KafkaApplication.startApplication(new KafkaStreamsApplication() { @Override - public void buildTopology(final StreamsBuilder builder) { - throw new UnsupportedOperationException(); - } + public StreamsApp createApp(final boolean cleanUp) { + return new StreamsApp() { + @Override + public void buildTopology(final TopologyBuilder builder) { + throw new UnsupportedOperationException(); + } - @Override - public String getUniqueAppId() { - throw new UnsupportedOperationException(); + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + throw new UnsupportedOperationException(); + } + }; } @Override - protected void runCleanUp() { + public void clean() { throw new RuntimeException(); } }, new String[]{ @@ -124,7 +127,7 @@ protected void runCleanUp() { "--schema-registry-url", "http://localhost:8081", "--input-topics", "input", "--output-topic", "output", - "--clean-up", + "clean", }); } @@ -133,13 +136,18 @@ protected void runCleanUp() { void shouldExitWithErrorCodeOnMissingBrokerParameter() { KafkaApplication.startApplication(new KafkaStreamsApplication() { @Override - public void buildTopology(final StreamsBuilder builder) { - throw new UnsupportedOperationException(); - } + public StreamsApp createApp(final boolean cleanUp) { + return new StreamsApp() { + @Override + public void buildTopology(final TopologyBuilder builder) { + throw new UnsupportedOperationException(); + } - @Override - public String getUniqueAppId() { - throw new UnsupportedOperationException(); + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + throw new UnsupportedOperationException(); + } + }; } @Override @@ -157,21 +165,21 @@ public void run() { @ExpectSystemExitWithStatus(1) void shouldExitWithErrorInTopology() throws InterruptedException { final String input = "input"; - try (final 
EmbeddedKafkaCluster kafkaCluster = provisionWith(defaultClusterConfig()); - final KafkaStreamsApplication app = new KafkaStreamsApplication() { + try (final EmbeddedKafkaCluster kafkaCluster = newKafkaCluster(); + final KafkaStreamsApplication app = new SimpleKafkaStreamsApplication(() -> new StreamsApp() { @Override - public void buildTopology(final StreamsBuilder builder) { - builder.stream(this.getInputTopics(), Consumed.with(Serdes.ByteArray(), Serdes.ByteArray())) + public void buildTopology(final TopologyBuilder builder) { + builder.streamInput(Consumed.with(Serdes.ByteArray(), Serdes.ByteArray())) .peek((k, v) -> { throw new RuntimeException(); }); } @Override - public String getUniqueAppId() { + public String getUniqueAppId(final StreamsTopicConfig topics) { return "app"; } - }) { + })) { kafkaCluster.start(); kafkaCluster.createTopic(TopicConfig.withName(input).build()); @@ -190,19 +198,19 @@ public String getUniqueAppId() { void shouldExitWithSuccessCodeOnShutdown() throws InterruptedException { final String input = "input"; final String output = "output"; - try (final EmbeddedKafkaCluster kafkaCluster = provisionWith(defaultClusterConfig()); - final KafkaStreamsApplication app = new KafkaStreamsApplication() { + try (final EmbeddedKafkaCluster kafkaCluster = newKafkaCluster(); + final KafkaStreamsApplication app = new SimpleKafkaStreamsApplication(() -> new StreamsApp() { @Override - public void buildTopology(final StreamsBuilder builder) { - builder.stream(this.getInputTopics(), Consumed.with(Serdes.ByteArray(), Serdes.ByteArray())) - .to(this.getOutputTopic()); + public void buildTopology(final TopologyBuilder builder) { + builder.streamInput(Consumed.with(Serdes.ByteArray(), Serdes.ByteArray())) + .to(builder.getTopics().getOutputTopic()); } @Override - public String getUniqueAppId() { + public String getUniqueAppId(final StreamsTopicConfig topics) { return "app"; } - }) { + })) { kafkaCluster.start(); kafkaCluster.createTopic(TopicConfig.withName(input).build()); kafkaCluster.createTopic(TopicConfig.withName(output).build()); @@ -227,81 +235,87 @@ public String getUniqueAppId() { @Test @ExpectSystemExitWithStatus(1) - void shouldExitWithSuccessCodeOnCleanupError() { + void shouldExitWithErrorOnCleanupError() { KafkaApplication.startApplication(new KafkaStreamsApplication() { @Override - public void buildTopology(final StreamsBuilder builder) { - throw new UnsupportedOperationException(); - } - - @Override - public String getUniqueAppId() { - throw new UnsupportedOperationException(); - } + public StreamsApp createApp(final boolean cleanUp) { + return new StreamsApp() { + @Override + public void buildTopology(final TopologyBuilder builder) { + throw new UnsupportedOperationException(); + } - @Override - protected void runCleanUp() { - // do nothing + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + throw new UnsupportedOperationException(); + } + }; } }, new String[]{ "--brokers", "localhost:9092", "--schema-registry-url", "http://localhost:8081", "--input-topics", "input", "--output-topic", "output", - "--clean-up", + "clean", }); } @Test void shouldParseArguments() { - final KafkaStreamsApplication app = new KafkaStreamsApplication() { + try (final KafkaStreamsApplication app = new KafkaStreamsApplication() { @Override - public void buildTopology(final StreamsBuilder builder) { - throw new UnsupportedOperationException(); - } + public StreamsApp createApp(final boolean cleanUp) { + return new StreamsApp() { + @Override + public void 
buildTopology(final TopologyBuilder builder) { + throw new UnsupportedOperationException(); + } - @Override - public String getUniqueAppId() { - throw new UnsupportedOperationException(); + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + throw new UnsupportedOperationException(); + } + }; } @Override public void run() { // do nothing } - }; - KafkaApplication.startApplicationWithoutExit(app, new String[]{ - "--brokers", "brokers", - "--schema-registry-url", "schema-registry", - "--input-topics", "input1,input2", - "--extra-input-topics", "role1=input3,role2=input4;input5", - "--input-pattern", ".*", - "--extra-input-patterns", "role1=.+,role2=\\d+", - "--output-topic", "output1", - "--extra-output-topics", "role1=output2,role2=output3", - }); - assertThat(app.getInputTopics()).containsExactly("input1", "input2"); - assertThat(app.getExtraInputTopics()) - .hasSize(2) - .containsEntry("role1", List.of("input3")) - .containsEntry("role2", List.of("input4", "input5")); - assertThat(app.getInputTopics("role1")).isEqualTo(List.of("input3")); - assertThat(app.getInputTopic("role2")).isEqualTo("input4"); - assertThat(app.getInputTopics("role2")).isEqualTo(List.of("input4", "input5")); - assertThat(app.getInputPattern()) - .satisfies(pattern -> assertThat(pattern.pattern()).isEqualTo(Pattern.compile(".*").pattern())); - assertThat(app.getExtraInputPatterns()) - .hasSize(2) - .hasEntrySatisfying("role1", - pattern -> assertThat(pattern.pattern()).isEqualTo(Pattern.compile(".+").pattern())) - .hasEntrySatisfying("role2", - pattern -> assertThat(pattern.pattern()).isEqualTo(Pattern.compile("\\d+").pattern())); - assertThat(app.getInputPattern("role1").pattern()).isEqualTo(Pattern.compile(".+").pattern()); - assertThat(app.getInputPattern("role2").pattern()).isEqualTo(Pattern.compile("\\d+").pattern()); - assertThat(app.getOutputTopic()).isEqualTo("output1"); - assertThat(app.getExtraOutputTopics()) - .hasSize(2) - .containsEntry("role1", "output2") - .containsEntry("role2", "output3"); + }) { + KafkaApplication.startApplicationWithoutExit(app, new String[]{ + "--brokers", "brokers", + "--schema-registry-url", "schema-registry", + "--input-topics", "input1,input2", + "--extra-input-topics", "role1=input3,role2=input4;input5", + "--input-pattern", ".*", + "--extra-input-patterns", "role1=.+,role2=\\d+", + "--output-topic", "output1", + "--extra-output-topics", "role1=output2,role2=output3", + "--kafka-config", "foo=1,bar=2", + }); + assertThat(app.getInputTopics()).containsExactly("input1", "input2"); + assertThat(app.getExtraInputTopics()) + .hasSize(2) + .containsEntry("role1", List.of("input3")) + .containsEntry("role2", List.of("input4", "input5")); + assertThat(app.getInputPattern()) + .satisfies(pattern -> assertThat(pattern.pattern()).isEqualTo(Pattern.compile(".*").pattern())); + assertThat(app.getExtraInputPatterns()) + .hasSize(2) + .hasEntrySatisfying("role1", + pattern -> assertThat(pattern.pattern()).isEqualTo(Pattern.compile(".+").pattern())) + .hasEntrySatisfying("role2", + pattern -> assertThat(pattern.pattern()).isEqualTo(Pattern.compile("\\d+").pattern())); + assertThat(app.getOutputTopic()).isEqualTo("output1"); + assertThat(app.getExtraOutputTopics()) + .hasSize(2) + .containsEntry("role1", "output2") + .containsEntry("role2", "output3"); + assertThat(app.getKafkaConfig()) + .hasSize(2) + .containsEntry("foo", "1") + .containsEntry("bar", "2"); + } } } diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/CloseFlagApp.java 
b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/CloseFlagApp.java similarity index 66% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/CloseFlagApp.java rename to streams-bootstrap-cli/src/test/java/com/bakdata/kafka/CloseFlagApp.java index fdeb73cf..6e928d43 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/CloseFlagApp.java +++ b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/CloseFlagApp.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -27,7 +27,6 @@ import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; -import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.kstream.KStream; @NoArgsConstructor @@ -36,28 +35,32 @@ public class CloseFlagApp extends KafkaStreamsApplication { private boolean closed = false; - private Boolean leaveGroup; + private boolean appClosed = false; @Override public void close() { - this.closed = true; super.close(); + this.closed = true; } @Override - public void buildTopology(final StreamsBuilder builder) { - final KStream input = builder.stream(this.getInputTopics()); - input.to(this.getOutputTopic()); - } + public StreamsApp createApp(final boolean cleanUp) { + return new StreamsApp() { + @Override + public void buildTopology(final TopologyBuilder builder) { + final KStream input = builder.streamInput(); + input.to(builder.getTopics().getOutputTopic()); + } - @Override - public String getUniqueAppId() { - return this.getClass().getSimpleName() + "-" + this.getOutputTopic(); - } + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + return CloseFlagApp.this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); + } - @Override - void closeStreams(final boolean leaveGroup) { - this.leaveGroup = leaveGroup; - super.closeStreams(leaveGroup); + @Override + public void close() { + CloseFlagApp.this.appClosed = true; + } + }; } } diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/EnvironmentArgumentsParserTest.java b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/EnvironmentArgumentsParserTest.java similarity index 99% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/EnvironmentArgumentsParserTest.java rename to streams-bootstrap-cli/src/test/java/com/bakdata/kafka/EnvironmentArgumentsParserTest.java index 7a2395d3..64d5ac4f 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/EnvironmentArgumentsParserTest.java +++ b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/EnvironmentArgumentsParserTest.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/StringListConverterTest.java b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/StringListConverterTest.java similarity index 98% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/StringListConverterTest.java rename to streams-bootstrap-cli/src/test/java/com/bakdata/kafka/StringListConverterTest.java index 19f86232..0f08cc1b 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/StringListConverterTest.java +++ 
b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/StringListConverterTest.java
@@ -1,7 +1,7 @@
 /*
  * MIT License
  *
- * Copyright (c) 2023 bakdata
+ * Copyright (c) 2024 bakdata
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
diff --git a/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/TestUtil.java b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/TestUtil.java
new file mode 100644
index 00000000..c89ffa4f
--- /dev/null
+++ b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/TestUtil.java
@@ -0,0 +1,44 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2024 bakdata
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package com.bakdata.kafka;
+
+import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith;
+import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.newClusterConfig;
+import static net.mguenther.kafka.junit.EmbeddedKafkaConfig.brokers;
+
+import lombok.experimental.UtilityClass;
+import net.mguenther.kafka.junit.EmbeddedKafkaCluster;
+
+@UtilityClass
+public class TestUtil {
+    public static EmbeddedKafkaCluster newKafkaCluster() {
+        return provisionWith(newClusterConfig()
+                .configure(brokers()
+                        .with("transaction.state.log.num.partitions", 10)
+                        .with("offsets.topic.num.partitions", 10)
+                        .build())
+                .build());
+    }
+}
diff --git a/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/integration/RunProducerAppTest.java b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/integration/RunProducerAppTest.java
new file mode 100644
index 00000000..d460658b
--- /dev/null
+++ b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/integration/RunProducerAppTest.java
@@ -0,0 +1,120 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2024 bakdata
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka.integration; + +import static com.bakdata.kafka.TestUtil.newKafkaCluster; +import static net.mguenther.kafka.junit.Wait.delay; +import static org.assertj.core.api.Assertions.assertThat; + +import com.bakdata.kafka.KafkaProducerApplication; +import com.bakdata.kafka.ProducerApp; +import com.bakdata.kafka.ProducerBuilder; +import com.bakdata.kafka.ProducerRunnable; +import com.bakdata.kafka.SimpleKafkaProducerApplication; +import com.bakdata.kafka.TestRecord; +import com.bakdata.schemaregistrymock.junit5.SchemaRegistryMockExtension; +import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; +import io.confluent.kafka.streams.serdes.avro.SpecificAvroDeserializer; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import net.mguenther.kafka.junit.EmbeddedKafkaCluster; +import net.mguenther.kafka.junit.ReadKeyValues; +import net.mguenther.kafka.junit.TopicConfig; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +class RunProducerAppTest { + private static final int TIMEOUT_SECONDS = 10; + @RegisterExtension + final SchemaRegistryMockExtension schemaRegistryMockExtension = new SchemaRegistryMockExtension(); + private final EmbeddedKafkaCluster kafkaCluster = newKafkaCluster(); + + @BeforeEach + void setup() { + this.kafkaCluster.start(); + } + + @AfterEach + void tearDown() { + this.kafkaCluster.stop(); + } + + @Test + void shouldRunApp() throws InterruptedException { + final String output = "output"; + this.kafkaCluster.createTopic(TopicConfig.withName(output).useDefaults()); + try (final KafkaProducerApplication app = new SimpleKafkaProducerApplication(() -> new ProducerApp() { + @Override + public ProducerRunnable buildRunnable(final ProducerBuilder builder) { + return () -> { + try (final Producer producer = builder.createProducer()) { + final TestRecord testRecord = TestRecord.newBuilder().setContent("bar").build(); + producer.send(new ProducerRecord<>(builder.getTopics().getOutputTopic(), "foo", testRecord)); + } + }; + } + + @Override + public Map createKafkaProperties() { + return Map.of( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class + ); + } + })) { + app.setBrokers(this.kafkaCluster.getBrokerList()); + app.setSchemaRegistryUrl(this.schemaRegistryMockExtension.getUrl()); + app.setOutputTopic(output); + app.setKafkaConfig(Map.of( + ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "10000" + )); + app.run(); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + assertThat(this.kafkaCluster.read(ReadKeyValues.from(output, String.class, TestRecord.class) + 
.with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) + .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, SpecificAvroDeserializer.class) + .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, + this.schemaRegistryMockExtension.getUrl()) + .build())) + .hasSize(1) + .anySatisfy(kv -> { + assertThat(kv.getKey()).isEqualTo("foo"); + assertThat(kv.getValue().getContent()).isEqualTo("bar"); + }); + app.clean(); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + assertThat(this.kafkaCluster.exists(app.getOutputTopic())) + .as("Output topic is deleted") + .isFalse(); + } + } +} diff --git a/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/integration/RunStreamsAppTest.java b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/integration/RunStreamsAppTest.java new file mode 100644 index 00000000..57111db8 --- /dev/null +++ b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/integration/RunStreamsAppTest.java @@ -0,0 +1,96 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka.integration; + +import static com.bakdata.kafka.TestUtil.newKafkaCluster; +import static net.mguenther.kafka.junit.Wait.delay; +import static org.assertj.core.api.Assertions.assertThat; + +import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.SimpleKafkaStreamsApplication; +import com.bakdata.kafka.test_applications.Mirror; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import net.mguenther.kafka.junit.EmbeddedKafkaCluster; +import net.mguenther.kafka.junit.KeyValue; +import net.mguenther.kafka.junit.ReadKeyValues; +import net.mguenther.kafka.junit.SendKeyValuesTransactional; +import net.mguenther.kafka.junit.TopicConfig; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class RunStreamsAppTest { + private static final int TIMEOUT_SECONDS = 10; + private final EmbeddedKafkaCluster kafkaCluster = newKafkaCluster(); + + @BeforeEach + void setup() { + this.kafkaCluster.start(); + } + + @AfterEach + void tearDown() { + this.kafkaCluster.stop(); + } + + @Test + void shouldRunApp() throws InterruptedException { + final String input = "input"; + final String output = "output"; + this.kafkaCluster.createTopic(TopicConfig.withName(input).useDefaults()); + this.kafkaCluster.createTopic(TopicConfig.withName(output).useDefaults()); + try (final KafkaStreamsApplication app = new SimpleKafkaStreamsApplication(Mirror::new)) { + app.setBrokers(this.kafkaCluster.getBrokerList()); + app.setKafkaConfig(Map.of( + ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "10000" + )); + app.setInputTopics(List.of(input)); + app.setOutputTopic(output); + // run in Thread because the application blocks indefinitely + new Thread(app).start(); + final SendKeyValuesTransactional kvSendKeyValuesTransactionalBuilder = + SendKeyValuesTransactional.inTransaction(input, List.of(new KeyValue<>("foo", "bar"))) + .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) + .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class) + .build(); + this.kafkaCluster.send(kvSendKeyValuesTransactionalBuilder); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + assertThat(this.kafkaCluster.read(ReadKeyValues.from(output, String.class, String.class) + .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) + .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) + .build())) + .hasSize(1); + } + } +} diff --git a/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/integration/StreamsCleanUpTest.java b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/integration/StreamsCleanUpTest.java new file mode 100644 index 00000000..b687f602 --- /dev/null +++ b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/integration/StreamsCleanUpTest.java @@ -0,0 +1,208 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including 
without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka.integration; + + +import static com.bakdata.kafka.TestUtil.newKafkaCluster; +import static net.mguenther.kafka.junit.Wait.delay; + +import com.bakdata.kafka.CloseFlagApp; +import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.SimpleKafkaStreamsApplication; +import com.bakdata.kafka.test_applications.WordCount; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import lombok.extern.slf4j.Slf4j; +import net.mguenther.kafka.junit.EmbeddedKafkaCluster; +import net.mguenther.kafka.junit.KeyValue; +import net.mguenther.kafka.junit.ReadKeyValues; +import net.mguenther.kafka.junit.SendValuesTransactional; +import net.mguenther.kafka.junit.TopicConfig; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.common.serialization.LongDeserializer; +import org.apache.kafka.streams.StreamsConfig; +import org.assertj.core.api.SoftAssertions; +import org.assertj.core.api.junit.jupiter.InjectSoftAssertions; +import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +@Slf4j +@ExtendWith(SoftAssertionsExtension.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +class StreamsCleanUpTest { + private static final int TIMEOUT_SECONDS = 10; + private final EmbeddedKafkaCluster kafkaCluster = newKafkaCluster(); + @InjectSoftAssertions + private SoftAssertions softly; + + private static void runAppAndClose(final KafkaStreamsApplication app) throws InterruptedException { + runApp(app); + app.stop(); + } + + private static void runApp(final KafkaStreamsApplication app) throws InterruptedException { + // run in Thread because the application blocks indefinitely + new Thread(app).start(); + // Wait until stream application has consumed all data + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + @BeforeEach + void setup() throws InterruptedException { + this.kafkaCluster.start(); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + @AfterEach + void tearDown() throws InterruptedException { + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + this.kafkaCluster.stop(); + } + + @Test + void shouldClean() throws InterruptedException { + try (final KafkaStreamsApplication app = this.createWordCountApplication()) { + 
final SendValuesTransactional sendRequest = + SendValuesTransactional.inTransaction(app.getInputTopics().get(0), + List.of("blub", "bla", "blub")).useDefaults(); + this.kafkaCluster.send(sendRequest); + + final List> expectedValues = List.of( + new KeyValue<>("blub", 1L), + new KeyValue<>("bla", 1L), + new KeyValue<>("blub", 2L) + ); + this.runAndAssertContent(expectedValues, "All entries are once in the input topic after the 1st run", app); + + // Wait until all stream application are completely stopped before triggering cleanup + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + app.clean(); + + this.softly.assertThat(this.kafkaCluster.exists(app.getOutputTopic())) + .as("Output topic is deleted") + .isFalse(); + + this.runAndAssertContent(expectedValues, "All entries are once in the input topic after the 2nd run", app); + } + } + + @Test + void shouldReset() throws InterruptedException { + try (final KafkaStreamsApplication app = this.createWordCountApplication()) { + final SendValuesTransactional sendRequest = + SendValuesTransactional.inTransaction(app.getInputTopics().get(0), + List.of("blub", "bla", "blub")).useDefaults(); + this.kafkaCluster.send(sendRequest); + + final List> expectedValues = List.of( + new KeyValue<>("blub", 1L), + new KeyValue<>("bla", 1L), + new KeyValue<>("blub", 2L) + ); + this.runAndAssertContent(expectedValues, "All entries are once in the input topic after the 1st run", app); + + // Wait until all stream application are completely stopped before triggering cleanup + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + app.reset(); + + final List> entriesTwice = expectedValues.stream() + .flatMap(entry -> Stream.of(entry, entry)) + .collect(Collectors.toList()); + this.runAndAssertContent(entriesTwice, "All entries are twice in the input topic after the 2nd run", app); + } + } + + @Test + void shouldCallClose() throws InterruptedException { + try (final CloseFlagApp app = this.createCloseFlagApplication()) { + this.kafkaCluster.createTopic(TopicConfig.withName(app.getInputTopics().get(0)).useDefaults()); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + this.softly.assertThat(app.isClosed()).isFalse(); + this.softly.assertThat(app.isAppClosed()).isFalse(); + // if we don't run the app, the coordinator will be unavailable + runAppAndClose(app); + this.softly.assertThat(app.isAppClosed()).isTrue(); + app.setAppClosed(false); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + app.clean(); + this.softly.assertThat(app.isAppClosed()).isTrue(); + app.setAppClosed(false); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + app.reset(); + this.softly.assertThat(app.isAppClosed()).isTrue(); + } + } + + private CloseFlagApp createCloseFlagApplication() { + final CloseFlagApp app = new CloseFlagApp(); + app.setInputTopics(List.of("input")); + app.setOutputTopic("output"); + return this.configure(app); + } + + private List> readOutputTopic(final String outputTopic) throws InterruptedException { + final ReadKeyValues readRequest = ReadKeyValues.from(outputTopic, Long.class) + .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class).build(); + return this.kafkaCluster.read(readRequest); + } + + private void runAndAssertContent(final Iterable> expectedValues, + final String description, final KafkaStreamsApplication app) + throws InterruptedException { + runAppAndClose(app); + + final List> output = this.readOutputTopic(app.getOutputTopic()); + this.softly.assertThat(output) + .as(description) + .containsExactlyInAnyOrderElementsOf(expectedValues); + } + + private 
KafkaStreamsApplication createWordCountApplication() { + final KafkaStreamsApplication application = new SimpleKafkaStreamsApplication(WordCount::new); + application.setOutputTopic("word_output"); + application.setInputTopics(List.of("word_input")); + return this.configure(application); + } + + private T configure(final T application) { + application.setBrokers(this.kafkaCluster.getBrokerList()); + application.setKafkaConfig(Map.of( + StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, "0", + ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "10000" + )); + return application; + } + +} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/Mirror.java b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/test_applications/Mirror.java similarity index 70% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/Mirror.java rename to streams-bootstrap-cli/src/test/java/com/bakdata/kafka/test_applications/Mirror.java index 5dfad604..212c7611 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/Mirror.java +++ b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/test_applications/Mirror.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,22 +24,23 @@ package com.bakdata.kafka.test_applications; -import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; +import com.bakdata.kafka.TopologyBuilder; import lombok.NoArgsConstructor; -import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.kstream.KStream; @NoArgsConstructor -public class Mirror extends KafkaStreamsApplication { +public class Mirror implements StreamsApp { @Override - public void buildTopology(final StreamsBuilder builder) { - final KStream input = builder.stream(this.getInputTopics()); - input.to(this.getOutputTopic()); + public void buildTopology(final TopologyBuilder builder) { + final KStream input = builder.streamInput(); + input.to(builder.getTopics().getOutputTopic()); } @Override - public String getUniqueAppId() { - return this.getClass().getSimpleName() + "-" + this.getOutputTopic(); + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); } } diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/WordCount.java b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/test_applications/WordCount.java similarity index 77% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/WordCount.java rename to streams-bootstrap-cli/src/test/java/com/bakdata/kafka/test_applications/WordCount.java index d67d80d4..2bcdc095 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/WordCount.java +++ b/streams-bootstrap-cli/src/test/java/com/bakdata/kafka/test_applications/WordCount.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,26 +24,24 @@ package com.bakdata.kafka.test_applications; -import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.StreamsApp; +import 
com.bakdata.kafka.StreamsTopicConfig; +import com.bakdata.kafka.TopologyBuilder; import java.util.Arrays; import java.util.regex.Pattern; import lombok.NoArgsConstructor; import org.apache.kafka.common.serialization.Serdes; -import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.KTable; import org.apache.kafka.streams.kstream.Materialized; import org.apache.kafka.streams.kstream.Produced; @NoArgsConstructor -public class WordCount extends KafkaStreamsApplication { - public static void main(final String[] args) { - startApplication(new WordCount(), args); - } +public class WordCount implements StreamsApp { @Override - public void buildTopology(final StreamsBuilder builder) { - final KStream textLines = builder.stream(this.getInputTopics()); + public void buildTopology(final TopologyBuilder builder) { + final KStream textLines = builder.streamInput(); final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); final KTable wordCounts = textLines @@ -51,11 +49,11 @@ public void buildTopology(final StreamsBuilder builder) { .groupBy((key, word) -> word) .count(Materialized.as("counts")); - wordCounts.toStream().to(this.outputTopic, Produced.valueSerde(Serdes.Long())); + wordCounts.toStream().to(builder.getTopics().getOutputTopic(), Produced.valueSerde(Serdes.Long())); } @Override - public String getUniqueAppId() { - return this.getClass().getSimpleName() + "-" + this.getOutputTopic(); + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); } } diff --git a/streams-bootstrap-cli/src/test/resources/log4j2.xml b/streams-bootstrap-cli/src/test/resources/log4j2.xml new file mode 100644 index 00000000..0d4071ce --- /dev/null +++ b/streams-bootstrap-cli/src/test/resources/log4j2.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/streams-bootstrap/build.gradle.kts b/streams-bootstrap-core/build.gradle.kts similarity index 78% rename from streams-bootstrap/build.gradle.kts rename to streams-bootstrap-core/build.gradle.kts index fb4613e6..4bd028b1 100644 --- a/streams-bootstrap/build.gradle.kts +++ b/streams-bootstrap-core/build.gradle.kts @@ -8,15 +8,11 @@ dependencies { val kafkaVersion: String by project implementation(group = "org.apache.kafka", name = "kafka-tools", version = kafkaVersion) - implementation(group = "info.picocli", name = "picocli", version = "4.7.5") api(group = "org.apache.kafka", name = "kafka-streams", version = kafkaVersion) api(group = "org.apache.kafka", name = "kafka-clients", version = kafkaVersion) val confluentVersion: String by project implementation(group = "io.confluent", name = "kafka-streams-avro-serde", version = confluentVersion) api(group = "io.confluent", name = "kafka-schema-registry-client", version = confluentVersion) - val log4jVersion = "2.23.0" - implementation(group = "org.apache.logging.log4j", name = "log4j-core", version = log4jVersion) - implementation(group = "org.apache.logging.log4j", name = "log4j-slf4j2-impl", version = log4jVersion) api( group = "org.slf4j", name = "slf4j-api", @@ -25,29 +21,30 @@ dependencies { implementation(group = "com.google.guava", name = "guava", version = "33.0.0-jre") implementation(group = "org.jooq", name = "jool", version = "0.9.14") - val junitVersion = "5.10.2" + val junitVersion: String by project + testRuntimeOnly(group = "org.junit.jupiter", name = "junit-jupiter-engine", version 
= junitVersion) testImplementation(group = "org.junit.jupiter", name = "junit-jupiter-api", version = junitVersion) testImplementation(group = "org.junit.jupiter", name = "junit-jupiter-params", version = junitVersion) testImplementation(group = "org.junit-pioneer", name = "junit-pioneer", version = "2.2.0") - testRuntimeOnly(group = "org.junit.jupiter", name = "junit-jupiter-engine", version = junitVersion) - testImplementation(group = "org.assertj", name = "assertj-core", version = "3.25.3") - val mockitoVersion = "5.10.0" + val assertJVersion: String by project + testImplementation(group = "org.assertj", name = "assertj-core", version = assertJVersion) + val mockitoVersion: String by project testImplementation(group = "org.mockito", name = "mockito-core", version = mockitoVersion) testImplementation(group = "org.mockito", name = "mockito-junit-jupiter", version = mockitoVersion) val fluentKafkaVersion: String by project testImplementation(project(":streams-bootstrap-test")) - testImplementation(group = "org.apache.kafka", name = "kafka-streams-test-utils", version = kafkaVersion) testImplementation( group = "com.bakdata.fluent-kafka-streams-tests", name = "schema-registry-mock-junit5", version = fluentKafkaVersion ) - testImplementation(group = "net.mguenther.kafka", name = "kafka-junit", version = "3.6.0") { + val kafkaJunitVersion: String by project + testImplementation(group = "net.mguenther.kafka", name = "kafka-junit", version = kafkaJunitVersion) { exclude(group = "org.slf4j", module = "slf4j-log4j12") } - - testImplementation(group = "com.ginsberg", name = "junit5-system-exit", version = "1.1.2") + val log4jVersion: String by project + testImplementation(group = "org.apache.logging.log4j", name = "log4j-slf4j2-impl", version = log4jVersion) } tasks.withType { @@ -55,4 +52,5 @@ tasks.withType { "--add-opens=java.base/java.lang=ALL-UNNAMED", "--add-opens=java.base/java.util=ALL-UNNAMED" ) + maxHeapSize = "4g" } diff --git a/streams-bootstrap/lombok.config b/streams-bootstrap-core/lombok.config similarity index 100% rename from streams-bootstrap/lombok.config rename to streams-bootstrap-core/lombok.config diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/App.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/App.java new file mode 100644 index 00000000..65ed48bb --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/App.java @@ -0,0 +1,66 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import static java.util.Collections.emptyMap; + +import java.util.Map; + +/** + * Kafka application that defines necessary configurations + * @param type of topic config + * @param type of clean up config + */ +@FunctionalInterface +public interface App extends AutoCloseable { + + /** + * Configure clean up behavior + * @param configuration provides all runtime application configurations + * @return clean up configuration + */ + C setupCleanUp(final EffectiveAppConfiguration configuration); + + @Override + default void close() { + // do nothing by default + } + + /** + * This method should give a default configuration to run your application with. + * @return Returns a default Kafka configuration. Empty by default + */ + default Map createKafkaProperties() { + return emptyMap(); + } + + /** + * Setup Kafka resources, such as topics, before running this app + * @param configuration provides all runtime application configurations + */ + default void setup(final EffectiveAppConfiguration configuration) { + // do nothing by default + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/AppConfiguration.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/AppConfiguration.java new file mode 100644 index 00000000..06477203 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/AppConfiguration.java @@ -0,0 +1,55 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import static java.util.Collections.emptyMap; + +import java.util.Map; +import lombok.EqualsAndHashCode; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.Value; + +/** + * Configuration of an app. 
This includes topics and Kafka configuration + * @param type of topic config + */ +@Value +@RequiredArgsConstructor +@EqualsAndHashCode +public class AppConfiguration { + @NonNull + T topics; + @NonNull + Map kafkaConfig; + + /** + * Create a new {@code AppConfiguration} with empty Kafka configuration + * @param topics topics to use for app + */ + public AppConfiguration(final T topics) { + this(topics, emptyMap()); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/CapturingStreamsUncaughtExceptionHandler.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/CapturingStreamsUncaughtExceptionHandler.java new file mode 100644 index 00000000..93146792 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/CapturingStreamsUncaughtExceptionHandler.java @@ -0,0 +1,50 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */
+
+package com.bakdata.kafka;
+
+import lombok.NonNull;
+import lombok.RequiredArgsConstructor;
+import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler;
+
+@RequiredArgsConstructor
+class CapturingStreamsUncaughtExceptionHandler implements StreamsUncaughtExceptionHandler {
+
+    private final @NonNull StreamsUncaughtExceptionHandler wrapped;
+    private Throwable lastException;
+
+    @Override
+    public StreamThreadExceptionResponse handle(final Throwable exception) {
+        final StreamThreadExceptionResponse response = this.wrapped.handle(exception);
+        this.lastException = exception;
+        return response;
+    }
+
+    void throwException() {
+        if (this.lastException instanceof RuntimeException) {
+            throw (RuntimeException) this.lastException;
+        }
+        throw new StreamsApplicationException("Kafka Streams has transitioned to error", this.lastException);
+    }
+}
diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/CleanUpException.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/CleanUpException.java
similarity index 93%
rename from streams-bootstrap/src/main/java/com/bakdata/kafka/CleanUpException.java
rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/CleanUpException.java
index 2ded2c75..e230deef 100644
--- a/streams-bootstrap/src/main/java/com/bakdata/kafka/CleanUpException.java
+++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/CleanUpException.java
@@ -1,7 +1,7 @@
 /*
  * MIT License
  *
- * Copyright (c) 2023 bakdata
+ * Copyright (c) 2024 bakdata
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -25,7 +25,7 @@
 package com.bakdata.kafka;
 
 /**
- * Exception thrown if running streams clean up was unsuccessful
+ * Exception thrown if running clean up was unsuccessful
  */
 public class CleanUpException extends RuntimeException {
     public CleanUpException(final String message) {
diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/CleanUpRunner.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/CleanUpRunner.java
new file mode 100644
index 00000000..40ef7cfa
--- /dev/null
+++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/CleanUpRunner.java
@@ -0,0 +1,36 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2024 bakdata
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package com.bakdata.kafka;
+
+/**
+ * Cleans all resources associated with an application
+ */
+@FunctionalInterface
+public interface CleanUpRunner {
+    /**
+     * Clean all resources associated with an application
+     */
+    void clean();
+}
diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Configurable.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Configurable.java
new file mode 100644
index 00000000..0cb5c6f3
--- /dev/null
+++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Configurable.java
@@ -0,0 +1,38 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2024 bakdata
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package com.bakdata.kafka;
+
+import java.util.Map;
+
+@FunctionalInterface
+interface Configurable {
+    /**
+     * Configure this class
+     * @param config configs in key/value pairs
+     * @param isKey whether is for key or value
+     * @return configured instance
+     */
+    T configure(Map config, boolean isKey);
+}
diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfigurableSerde.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfigurableSerde.java
new file mode 100644
index 00000000..76cb7147
--- /dev/null
+++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfigurableSerde.java
@@ -0,0 +1,44 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2024 bakdata
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */ + +package com.bakdata.kafka; + +import java.util.Map; +import lombok.AccessLevel; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import org.apache.kafka.common.serialization.Serde; + +@RequiredArgsConstructor(access = AccessLevel.PACKAGE) +final class ConfigurableSerde, T> implements Configurable { + + private final @NonNull S serde; + + @Override + public S configure(final Map config, final boolean isKey) { + this.serde.configure(config, isKey); + return this.serde; + } + +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfigurableSerializer.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfigurableSerializer.java new file mode 100644 index 00000000..9cd8ffef --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfigurableSerializer.java @@ -0,0 +1,43 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import java.util.Map; +import lombok.AccessLevel; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import org.apache.kafka.common.serialization.Serializer; + +@RequiredArgsConstructor(access = AccessLevel.PACKAGE) +final class ConfigurableSerializer, T> implements Configurable { + + private final @NonNull S serializer; + + @Override + public S configure(final Map config, final boolean isKey) { + this.serializer.configure(config, isKey); + return this.serializer; + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Configurator.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Configurator.java new file mode 100644 index 00000000..1fd308fb --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Configurator.java @@ -0,0 +1,149 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import static com.bakdata.kafka.Preconfigured.create; + +import java.util.Map; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import org.apache.kafka.common.serialization.Serde; +import org.apache.kafka.common.serialization.Serializer; + +/** + * Configure {@link Serde} and {@link Serializer} using base properties + */ +@RequiredArgsConstructor +public class Configurator { + + private final @NonNull Map kafkaProperties; + + /** + * Configure a {@code Serde} for values using {@link #kafkaProperties} + * @param serde serde to configure + * @return configured {@code Serde} + * @param type to be (de-)serialized + */ + public Serde configureForValues(final Serde serde) { + return this.configureForValues(create(serde)); + } + + /** + * Configure a {@code Serde} for values using {@link #kafkaProperties} and config overrides + * @param serde serde to configure + * @param configOverrides configuration overrides + * @return configured {@code Serde} + * @param type to be (de-)serialized + */ + public Serde configureForValues(final Serde serde, final Map configOverrides) { + return this.configureForValues(create(serde, configOverrides)); + } + + /** + * Configure a {@code Serde} for keys using {@link #kafkaProperties} + * @param serde serde to configure + * @return configured {@code Serde} + * @param type to be (de-)serialized + */ + public Serde configureForKeys(final Serde serde) { + return this.configureForKeys(create(serde)); + } + + /** + * Configure a {@code Serde} for keys using {@link #kafkaProperties} and config overrides + * @param serde serde to configure + * @param configOverrides configuration overrides + * @return configured {@code Serde} + * @param type to be (de-)serialized + */ + public Serde configureForKeys(final Serde serde, final Map configOverrides) { + return this.configureForKeys(create(serde, configOverrides)); + } + + /** + * Configure a {@code Serializer} for values using {@link #kafkaProperties} + * @param serializer serializer to configure + * @return configured {@code Serializer} + * @param type to be (de-)serialized + */ + public Serializer configureForValues(final Serializer serializer) { + return this.configureForValues(create(serializer)); + } + + /** + * Configure a {@code Serializer} for values using {@link #kafkaProperties} and config overrides + * @param serializer serializer to configure + * @param configOverrides configuration overrides + * @return configured {@code Serializer} + * @param type to be (de-)serialized + */ + public Serializer configureForValues(final Serializer serializer, + final Map configOverrides) { + return this.configureForValues(create(serializer, configOverrides)); + } + + /** + * Configure a {@code Serializer} for keys using {@link #kafkaProperties} + * @param serializer serializer to configure + * @return configured {@code Serializer} + * @param type to be (de-)serialized + */ + public Serializer configureForKeys(final Serializer serializer) { + return this.configureForKeys(create(serializer)); + } + 
+ /** + * Configure a {@code Serializer} for keys using {@link #kafkaProperties} and config overrides + * @param serializer serializer to configure + * @param configOverrides configuration overrides + * @return configured {@code Serializer} + * @param type to be (de-)serialized + */ + public Serializer configureForKeys(final Serializer serializer, + final Map configOverrides) { + return this.configureForKeys(create(serializer, configOverrides)); + } + + /** + * Configure a {@code Preconfigured} for values object using {@link #kafkaProperties} + * @param preconfigured pre-configured {@link Serde} or {@link Serializer} + * @return configured instance + * @param type of configured instance + */ + public T configureForValues(final Preconfigured preconfigured) { + return preconfigured.configureForValues(this.kafkaProperties); + } + + /** + * Configure a {@code Preconfigured} for keys object using {@link #kafkaProperties} + * @param preconfigured pre-configured {@link Serde} or {@link Serializer} + * @return configured instance + * @param type of configured instance + */ + public T configureForKeys(final Preconfigured preconfigured) { + return preconfigured.configureForKeys(this.kafkaProperties); + } + +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfiguredApp.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfiguredApp.java new file mode 100644 index 00000000..9bdab008 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfiguredApp.java @@ -0,0 +1,42 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +/** + * An application with a corresponding configuration + * + * @param type of executable app after configuring {@link KafkaEndpointConfig} + */ +public interface ConfiguredApp extends AutoCloseable { + /** + * Create an executable app using the provided {@code KafkaEndpointConfig} + * @param endpointConfig endpoint to run app on + * @return executable streams app + */ + E withEndpoint(KafkaEndpointConfig endpointConfig); + + @Override + void close(); +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfiguredProducerApp.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfiguredProducerApp.java new file mode 100644 index 00000000..de2f409d --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfiguredProducerApp.java @@ -0,0 +1,141 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import static java.util.Collections.emptyMap; + +import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer; +import java.util.HashMap; +import java.util.Map; +import lombok.Getter; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.StringSerializer; + +/** + * A {@link ProducerApp} with a corresponding {@link AppConfiguration} + * @param type of {@link ProducerApp} + */ +@RequiredArgsConstructor +public class ConfiguredProducerApp implements ConfiguredApp> { + @Getter + private final @NonNull T app; + private final @NonNull AppConfiguration configuration; + + private static Map createBaseConfig(final KafkaEndpointConfig endpointConfig) { + final Map kafkaConfig = new HashMap<>(); + + if (endpointConfig.isSchemaRegistryConfigured()) { + kafkaConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, SpecificAvroSerializer.class); + kafkaConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, SpecificAvroSerializer.class); + } else { + kafkaConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); + kafkaConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); + } + + kafkaConfig.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1); + kafkaConfig.put(ProducerConfig.ACKS_CONFIG, "all"); + + // compression + kafkaConfig.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "gzip"); + + return kafkaConfig; + } + + /** + *
+     * <p>
+     * This method creates the configuration to run a {@link ProducerApp}.
+     * <p>
+     * Configuration is created in the following order
+     * <ul>
+     *     <li>
+     *         {@link ProducerConfig#KEY_SERIALIZER_CLASS_CONFIG} and
+     *         {@link ProducerConfig#VALUE_SERIALIZER_CLASS_CONFIG} are configured based on
+     *         {@link KafkaEndpointConfig#isSchemaRegistryConfigured()}.
+     *         If Schema Registry is configured, {@link SpecificAvroSerializer} is used, otherwise
+     *         {@link StringSerializer} is used.
+     *         Additionally, the following is configured:
+     * <pre>
+     * max.in.flight.requests.per.connection=1
+     * acks=all
+     * compression.type=gzip
+     * </pre>
+     *     </li>
+     *     <li>Configs provided by {@link ProducerApp#createKafkaProperties()}</li>
+     *     <li>Configs provided via environment variables (see
+     *         {@link EnvironmentStreamsConfigParser#parseVariables(Map)})</li>
+     *     <li>Configs provided by {@link AppConfiguration#getKafkaConfig()}</li>
+     *     <li>Configs provided by {@link KafkaEndpointConfig#createKafkaProperties()}</li>
+     * </ul>
+ * + * @param endpointConfig endpoint to run app on + * @return Kafka configuration + */ + public Map getKafkaProperties(final KafkaEndpointConfig endpointConfig) { + final KafkaPropertiesFactory propertiesFactory = this.createPropertiesFactory(endpointConfig); + return propertiesFactory.createKafkaProperties(emptyMap()); + } + + /** + * Create an {@code ExecutableProducerApp} using the provided {@code KafkaEndpointConfig} + * @return {@code ExecutableProducerApp} + */ + @Override + public ExecutableProducerApp withEndpoint(final KafkaEndpointConfig endpointConfig) { + final ProducerTopicConfig topics = this.getTopics(); + final Map kafkaProperties = this.getKafkaProperties(endpointConfig); + return new ExecutableProducerApp<>(topics, kafkaProperties, this.app); + } + + /** + * Get topic configuration + * @return topic configuration + */ + public ProducerTopicConfig getTopics() { + return this.configuration.getTopics(); + } + + @Override + public void close() { + this.app.close(); + } + + private KafkaPropertiesFactory createPropertiesFactory(final KafkaEndpointConfig endpointConfig) { + final Map baseConfig = createBaseConfig(endpointConfig); + return KafkaPropertiesFactory.builder() + .baseConfig(baseConfig) + .app(this.app) + .configuration(this.configuration) + .endpointConfig(endpointConfig) + .build(); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfiguredStreamsApp.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfiguredStreamsApp.java new file mode 100644 index 00000000..efe0a3d2 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ConfiguredStreamsApp.java @@ -0,0 +1,181 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
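For orientation, a hedged usage sketch (editorial, not part of this changeset) showing how the `ConfiguredProducerApp` above might be wired to an endpoint and executed. The `app` and `configuration` arguments are assumed to be constructed elsewhere, the addresses are placeholders, and `ProducerRunner#run()` is assumed to start the producer:

```java
import com.bakdata.kafka.AppConfiguration;
import com.bakdata.kafka.ConfiguredProducerApp;
import com.bakdata.kafka.ExecutableProducerApp;
import com.bakdata.kafka.KafkaEndpointConfig;
import com.bakdata.kafka.ProducerApp;

class ProducerWiringSketch {
    static void run(final ProducerApp app, final AppConfiguration configuration) {
        final ConfiguredProducerApp<ProducerApp> configuredApp =
                new ConfiguredProducerApp<>(app, configuration);
        final KafkaEndpointConfig endpointConfig = KafkaEndpointConfig.builder()
                .brokers("localhost:9092") // placeholder
                .schemaRegistryUrl("http://localhost:8081") // placeholder, optional
                .build();
        // withEndpoint merges base, app, environment, and endpoint configs as documented above
        try (final ExecutableProducerApp<ProducerApp> executableApp =
                configuredApp.withEndpoint(endpointConfig)) {
            executableApp.createRunner().run(); // ProducerRunner#run() assumed
        }
    }
}
```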
+ */ + +package com.bakdata.kafka; + +import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import lombok.Getter; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.Serdes.StringSerde; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.Topology; + +/** + * A {@link StreamsApp} with a corresponding {@link AppConfiguration} + * @param type of {@link StreamsApp} + */ +@RequiredArgsConstructor +public class ConfiguredStreamsApp implements ConfiguredApp> { + @Getter + private final @NonNull T app; + private final @NonNull AppConfiguration configuration; + + private static Map createBaseConfig(final KafkaEndpointConfig endpointConfig) { + final Map kafkaConfig = new HashMap<>(); + + if (endpointConfig.isSchemaRegistryConfigured()) { + kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, SpecificAvroSerde.class); + kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class); + } else { + kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); + kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); + } + + // exactly once and order + kafkaConfig.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE_V2); + kafkaConfig.put(StreamsConfig.producerPrefix(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION), 1); + + kafkaConfig.put(StreamsConfig.producerPrefix(ProducerConfig.ACKS_CONFIG), "all"); + + // compression + kafkaConfig.put(StreamsConfig.producerPrefix(ProducerConfig.COMPRESSION_TYPE_CONFIG), "gzip"); + + return kafkaConfig; + } + + /** + *
+     * <p>
+     * This method creates the configuration to run a {@link StreamsApp}.
+     * <p>
+     * Configuration is created in the following order
+     * <ul>
+     *     <li>
+     *         {@link StreamsConfig#DEFAULT_KEY_SERDE_CLASS_CONFIG} and
+     *         {@link StreamsConfig#DEFAULT_VALUE_SERDE_CLASS_CONFIG} are configured based on
+     *         {@link KafkaEndpointConfig#isSchemaRegistryConfigured()}.
+     *         If Schema Registry is configured, {@link SpecificAvroSerde} is used, otherwise
+     *         {@link StringSerde} is used.
+     *         Additionally, exactly-once, in-order, and compression are configured:
+     * <pre>
+     * processing.guarantee=exactly_once_v2
+     * producer.max.in.flight.requests.per.connection=1
+     * producer.acks=all
+     * producer.compression.type=gzip
+     * </pre>
+     *     </li>
+     *     <li>Configs provided by {@link StreamsApp#createKafkaProperties()}</li>
+     *     <li>Configs provided via environment variables (see
+     *         {@link EnvironmentStreamsConfigParser#parseVariables(Map)})</li>
+     *     <li>Configs provided by {@link AppConfiguration#getKafkaConfig()}</li>
+     *     <li>Configs provided by {@link KafkaEndpointConfig#createKafkaProperties()}</li>
+     *     <li>{@link StreamsConfig#APPLICATION_ID_CONFIG} is configured using
+     *         {@link StreamsApp#getUniqueAppId(StreamsTopicConfig)}</li>
+     * </ul>
+ * + * @param endpointConfig endpoint to run app on + * @return Kafka configuration + */ + public Map getKafkaProperties(final KafkaEndpointConfig endpointConfig) { + final KafkaPropertiesFactory propertiesFactory = this.createPropertiesFactory(endpointConfig); + return propertiesFactory.createKafkaProperties(Map.of( + StreamsConfig.APPLICATION_ID_CONFIG, this.getUniqueAppId() + )); + } + + /** + * Get unique application identifier of {@code StreamsApp} + * @return unique application identifier + * @see StreamsApp#getUniqueAppId(StreamsTopicConfig) + */ + public String getUniqueAppId() { + return Objects.requireNonNull(this.app.getUniqueAppId(this.getTopics())); + } + + /** + * Get topic configuration + * @return topic configuration + */ + public StreamsTopicConfig getTopics() { + return this.configuration.getTopics(); + } + + /** + * Create an {@code ExecutableStreamsApp} using the provided {@code KafkaEndpointConfig} + * @return {@code ExecutableStreamsApp} + */ + @Override + public ExecutableStreamsApp withEndpoint(final KafkaEndpointConfig endpointConfig) { + final Map kafkaProperties = this.getKafkaProperties(endpointConfig); + final Topology topology = this.createTopology(kafkaProperties); + final EffectiveAppConfiguration effectiveConfiguration = + new EffectiveAppConfiguration<>(this.getTopics(), kafkaProperties); + return ExecutableStreamsApp.builder() + .topology(topology) + .config(new StreamsConfig(kafkaProperties)) + .app(this.app) + .effectiveConfig(effectiveConfiguration) + .build(); + } + + /** + * Create the topology of the Kafka Streams app + * + * @param kafkaProperties configuration that should be used by clients to configure Kafka utilities + * @return topology of the Kafka Streams app + */ + public Topology createTopology(final Map kafkaProperties) { + final TopologyBuilder topologyBuilder = new TopologyBuilder(this.getTopics(), kafkaProperties); + this.app.buildTopology(topologyBuilder); + return topologyBuilder.build(); + } + + @Override + public void close() { + this.app.close(); + } + + private KafkaPropertiesFactory createPropertiesFactory(final KafkaEndpointConfig endpointConfig) { + final Map baseConfig = createBaseConfig(endpointConfig); + return KafkaPropertiesFactory.builder() + .baseConfig(baseConfig) + .app(this.app) + .configuration(this.configuration) + .endpointConfig(endpointConfig) + .build(); + } + +} diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/DefaultStreamsUncaughtExceptionHandler.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/DefaultConfigurable.java similarity index 78% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/DefaultStreamsUncaughtExceptionHandler.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/DefaultConfigurable.java index 341a1ad4..4ef82eaf 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/DefaultStreamsUncaughtExceptionHandler.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/DefaultConfigurable.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,11 +24,11 @@ package com.bakdata.kafka; -import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; +import java.util.Map; -class DefaultStreamsUncaughtExceptionHandler implements StreamsUncaughtExceptionHandler { +class DefaultConfigurable implements Configurable { 
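Similarly, a hedged sketch (editorial, not part of this changeset) of wiring a `ConfiguredStreamsApp`; `app` and `configuration` are assumed to exist, the broker address is a placeholder, and `StreamsRunner#run()` is assumed to start the topology:

```java
import com.bakdata.kafka.AppConfiguration;
import com.bakdata.kafka.ConfiguredStreamsApp;
import com.bakdata.kafka.ExecutableStreamsApp;
import com.bakdata.kafka.KafkaEndpointConfig;
import com.bakdata.kafka.StreamsApp;
import com.bakdata.kafka.StreamsRunner;

class StreamsWiringSketch {
    static void run(final StreamsApp app, final AppConfiguration configuration) {
        final ConfiguredStreamsApp<StreamsApp> configuredApp =
                new ConfiguredStreamsApp<>(app, configuration);
        final KafkaEndpointConfig endpointConfig = KafkaEndpointConfig.builder()
                .brokers("localhost:9092") // placeholder
                .build();
        // withEndpoint builds the topology and the StreamsConfig, including the application id
        try (final ExecutableStreamsApp<StreamsApp> executableApp =
                configuredApp.withEndpoint(endpointConfig)) {
            final StreamsRunner runner = executableApp.createRunner();
            runner.run(); // StreamsRunner#run() assumed
        }
    }
}
```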
@Override - public StreamThreadExceptionResponse handle(final Throwable e) { - return StreamThreadExceptionResponse.SHUTDOWN_CLIENT; + public T configure(final Map config, final boolean isKey) { + return null; } } diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/DefaultStreamsUncaughtExceptionHandler.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/DefaultStreamsUncaughtExceptionHandler.java new file mode 100644 index 00000000..ccb4ca52 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/DefaultStreamsUncaughtExceptionHandler.java @@ -0,0 +1,39 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; + +/** + * {@code StreamsUncaughtExceptionHandler} that does not handle the exception and responds with + * {@link org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse#SHUTDOWN_CLIENT}. Mimics default behavior of {@link org.apache.kafka.streams.KafkaStreams} if no {@code StreamsUncaughtExceptionHandler} has been configured. 
+ * @see org.apache.kafka.streams.KafkaStreams#setUncaughtExceptionHandler(StreamsUncaughtExceptionHandler) + */ +class DefaultStreamsUncaughtExceptionHandler implements StreamsUncaughtExceptionHandler { + @Override + public StreamThreadExceptionResponse handle(final Throwable e) { + return StreamThreadExceptionResponse.SHUTDOWN_CLIENT; + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/EffectiveAppConfiguration.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/EffectiveAppConfiguration.java new file mode 100644 index 00000000..4c594344 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/EffectiveAppConfiguration.java @@ -0,0 +1,56 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import com.bakdata.kafka.util.ImprovedAdminClient; +import java.util.Map; +import lombok.EqualsAndHashCode; +import lombok.NonNull; +import lombok.Value; + +/** + * Configuration for setting up an app + * @param type of topic config + * @see StreamsApp#setup(EffectiveAppConfiguration) + * @see StreamsApp#setupCleanUp(EffectiveAppConfiguration) + * @see ProducerApp#setup(EffectiveAppConfiguration) + * @see ProducerApp#setupCleanUp(EffectiveAppConfiguration) + */ +@Value +@EqualsAndHashCode +public class EffectiveAppConfiguration { + @NonNull + T topics; + @NonNull + Map kafkaProperties; + + /** + * Create a new {@code ImprovedAdminClient} using {@link #kafkaProperties} + * @return {@code ImprovedAdminClient} + */ + public ImprovedAdminClient createAdminClient() { + return ImprovedAdminClient.create(this.kafkaProperties); + } +} diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/EnvironmentStreamsConfigParser.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/EnvironmentStreamsConfigParser.java similarity index 98% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/EnvironmentStreamsConfigParser.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/EnvironmentStreamsConfigParser.java index 8f283b5a..b2bd4f97 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/EnvironmentStreamsConfigParser.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/EnvironmentStreamsConfigParser.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ExecutableApp.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ExecutableApp.java new file mode 100644 index 00000000..bb55cd39 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ExecutableApp.java @@ -0,0 +1,56 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +/** + * An application with a corresponding topic and Kafka configuration + * @param type returned by {@link #createRunner()} and {@link #createRunner(Object)} + * @param type returned by {@link #createCleanUpRunner()} + * @param type of options to create runner + */ +public interface ExecutableApp extends AutoCloseable { + + @Override + void close(); + + /** + * Create {@code Runner} in order to run application with default options + * @return {@code Runner} + */ + R createRunner(); + + /** + * Create {@code Runner} in order to run application + * @param options options for creating runner + * @return {@code Runner} + */ + R createRunner(O options); + + /** + * Create {@code CleanUpRunner} in order to clean application + * @return {@code CleanUpRunner} + */ + C createCleanUpRunner(); +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ExecutableProducerApp.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ExecutableProducerApp.java new file mode 100644 index 00000000..d4b1b370 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ExecutableProducerApp.java @@ -0,0 +1,81 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import java.util.Map; +import lombok.AccessLevel; +import lombok.Getter; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; + +/** + * A {@link ProducerApp} with a corresponding {@link ProducerTopicConfig} and Kafka configuration + * @param type of {@link ProducerApp} + */ +@RequiredArgsConstructor(access = AccessLevel.PACKAGE) +@Getter +public class ExecutableProducerApp + implements ExecutableApp { + private final @NonNull ProducerTopicConfig topics; + private final @NonNull Map kafkaProperties; + private final @NonNull T app; + + /** + * Create {@code ProducerCleanUpRunner} in order to clean application + * @return {@code ProducerCleanUpRunner} + */ + @Override + public ProducerCleanUpRunner createCleanUpRunner() { + final EffectiveAppConfiguration configuration = this.createEffectiveConfiguration(); + final ProducerCleanUpConfiguration configurer = this.app.setupCleanUp(configuration); + return ProducerCleanUpRunner.create(this.topics, this.kafkaProperties, configurer); + } + + /** + * Create {@code ProducerRunner} in order to run application + * @return {@code ProducerRunner} + */ + @Override + public ProducerRunner createRunner() { + return this.createRunner(ProducerExecutionOptions.builder().build()); + } + + @Override + public ProducerRunner createRunner(final ProducerExecutionOptions options) { + final ProducerBuilder producerBuilder = new ProducerBuilder(this.topics, this.kafkaProperties); + final EffectiveAppConfiguration configuration = this.createEffectiveConfiguration(); + this.app.setup(configuration); + return new ProducerRunner(this.app.buildRunnable(producerBuilder)); + } + + @Override + public void close() { + this.app.close(); + } + + private EffectiveAppConfiguration createEffectiveConfiguration() { + return new EffectiveAppConfiguration<>(this.topics, this.kafkaProperties); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ExecutableStreamsApp.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ExecutableStreamsApp.java new file mode 100644 index 00000000..25009786 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ExecutableStreamsApp.java @@ -0,0 +1,88 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import lombok.AccessLevel; +import lombok.Builder; +import lombok.Getter; +import lombok.NonNull; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.Topology; + +/** + * A {@link StreamsApp} with a corresponding {@link Topology} and {@link StreamsConfig} + * @param type of {@link ProducerApp} + */ +@Builder(access = AccessLevel.PACKAGE) +@Getter +public class ExecutableStreamsApp + implements ExecutableApp { + + @Getter + private final @NonNull Topology topology; + @Getter + private final @NonNull StreamsConfig config; + @Getter + private final @NonNull T app; + private final @NonNull EffectiveAppConfiguration effectiveConfig; + + /** + * Create {@code StreamsCleanUpRunner} in order to clean application + * @return {@code StreamsCleanUpRunner} + */ + @Override + public StreamsCleanUpRunner createCleanUpRunner() { + final StreamsCleanUpConfiguration configurer = this.app.setupCleanUp(this.effectiveConfig); + return StreamsCleanUpRunner.create(this.topology, this.config, configurer); + } + + /** + * Create {@code StreamsRunner} in order to run application with default {@link StreamsExecutionOptions} + * @return {@code StreamsRunner} + * @see StreamsRunner#StreamsRunner(Topology, StreamsConfig) + */ + @Override + public StreamsRunner createRunner() { + this.app.setup(this.effectiveConfig); + return new StreamsRunner(this.topology, this.config); + } + + /** + * Create {@code StreamsRunner} in order to run application + * @param executionOptions options for running Kafka Streams application + * @return {@code StreamsRunner} + * @see StreamsRunner#StreamsRunner(Topology, StreamsConfig, StreamsExecutionOptions) + */ + @Override + public StreamsRunner createRunner(final StreamsExecutionOptions executionOptions) { + this.app.setup(this.effectiveConfig); + return new StreamsRunner(this.topology, this.config, executionOptions); + } + + @Override + public void close() { + this.app.close(); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/HasCleanHook.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/HasCleanHook.java new file mode 100644 index 00000000..d6238982 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/HasCleanHook.java @@ -0,0 +1,40 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +/** + * Interface for performing actions when cleaning apps + * @param self for chaining + */ +@FunctionalInterface +public interface HasCleanHook { + /** + * Register a hook that is invoked when cleaning apps + * @param hook factory to create hook from + * @return self for chaining + */ + SELF registerCleanHook(Runnable hook); + +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/HasTopicHooks.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/HasTopicHooks.java new file mode 100644 index 00000000..f3433e16 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/HasTopicHooks.java @@ -0,0 +1,53 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +/** + * Interface for performing actions on topics + * @param self for chaining + */ +@FunctionalInterface +public interface HasTopicHooks { + /** + * Register a hook that is invoked when performing actions on topics + * + * @param hook Action to run. Topic is passed as parameter + * @return self for chaining + */ + SELF registerTopicHook(TopicHook hook); + + /** + * Hook for performing actions on topics + */ + interface TopicHook { + /** + * Called when a topic is deleted + * @param topic name of the topic + */ + default void deleted(final String topic) { + // do nothing + } + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ImprovedStreamsConfig.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ImprovedStreamsConfig.java new file mode 100644 index 00000000..95b9a484 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ImprovedStreamsConfig.java @@ -0,0 +1,69 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import lombok.NonNull; +import lombok.Value; +import org.apache.kafka.streams.StreamsConfig; + +/** + * Class for simplified access to configs provided by {@link StreamsConfig} + */ +@Value +public class ImprovedStreamsConfig { + + @NonNull + StreamsConfig streamsConfig; + + /** + * Get the application id of the underlying {@link StreamsConfig} + * @return application id + * @see StreamsConfig#APPLICATION_ID_CONFIG + */ + public String getAppId() { + return this.streamsConfig.getString(StreamsConfig.APPLICATION_ID_CONFIG); + } + + /** + * Get the bootstrap servers of the underlying {@link StreamsConfig} + * @return list of bootstrap servers + * @see StreamsConfig#BOOTSTRAP_SERVERS_CONFIG + */ + public List getBoostrapServers() { + return this.streamsConfig.getList(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG); + } + + /** + * Get all configs of the underlying {@link StreamsConfig} + * @return Kafka configs + * @see StreamsConfig#originals() + */ + public Map getKafkaProperties() { + return Collections.unmodifiableMap(this.streamsConfig.originals()); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/KafkaEndpointConfig.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/KafkaEndpointConfig.java new file mode 100644 index 00000000..5aa42876 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/KafkaEndpointConfig.java @@ -0,0 +1,68 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import lombok.Builder; +import lombok.NonNull; +import org.apache.kafka.streams.StreamsConfig; + +/** + * Configuration to connect to Kafka infrastructure, i.e., brokers and optionally schema registry. 
+ */ +@Builder +public class KafkaEndpointConfig { + private final @NonNull String brokers; + private final String schemaRegistryUrl; + + /** + * Create Kafka properties to connect to infrastructure. + * The following properties are configured: + *
+     * <ul>
+     *     <li>{@code bootstrap.servers}</li>
+     *     <li>{@code schema.registry.url}</li>
+     * </ul>
+ * @return properties used for connecting to Kafka + */ + public Map createKafkaProperties() { + final Map kafkaConfig = new HashMap<>(); + kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokers); + if (this.isSchemaRegistryConfigured()) { + kafkaConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, this.schemaRegistryUrl); + } + return Collections.unmodifiableMap(kafkaConfig); + } + + /** + * Check if schema registry has been configured + * @return true if {@link #schemaRegistryUrl} has been configured + */ + public boolean isSchemaRegistryConfigured() { + return this.schemaRegistryUrl != null; + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/KafkaPropertiesFactory.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/KafkaPropertiesFactory.java new file mode 100644 index 00000000..aecd5dc2 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/KafkaPropertiesFactory.java @@ -0,0 +1,49 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import lombok.Builder; +import lombok.NonNull; + +@Builder +class KafkaPropertiesFactory { + private final @NonNull Map baseConfig; + private final @NonNull App app; + private final @NonNull AppConfiguration configuration; + private final @NonNull KafkaEndpointConfig endpointConfig; + + Map createKafkaProperties(final Map configOverrides) { + final Map kafkaConfig = new HashMap<>(this.baseConfig); + kafkaConfig.putAll(this.app.createKafkaProperties()); + kafkaConfig.putAll(EnvironmentStreamsConfigParser.parseVariables(System.getenv())); + kafkaConfig.putAll(this.configuration.getKafkaConfig()); + kafkaConfig.putAll(this.endpointConfig.createKafkaProperties()); + kafkaConfig.putAll(configOverrides); + return Collections.unmodifiableMap(kafkaConfig); + } +} diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/NoOpStateListener.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/NoOpStateListener.java similarity index 89% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/NoOpStateListener.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/NoOpStateListener.java index 51563b92..cfc312f6 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/NoOpStateListener.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/NoOpStateListener.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -27,6 +27,10 @@ import org.apache.kafka.streams.KafkaStreams.State; import org.apache.kafka.streams.KafkaStreams.StateListener; +/** + * {@code StateListener} that does nothing. + * @see org.apache.kafka.streams.KafkaStreams#setStateListener(StateListener) + */ class NoOpStateListener implements StateListener { @Override public void onChange(final State newState, final State oldState) { diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Preconfigured.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Preconfigured.java new file mode 100644 index 00000000..ee9bc3cb --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Preconfigured.java @@ -0,0 +1,155 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import static java.util.Collections.emptyMap; + +import java.util.HashMap; +import java.util.Map; +import lombok.AccessLevel; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import org.apache.kafka.common.serialization.Serde; +import org.apache.kafka.common.serialization.Serializer; + +/** + * A pre-configured {@link Serde} or {@link Serializer}, i.e., configs and isKey are set. + * @param type of underlying configurable + */ +@RequiredArgsConstructor(access = AccessLevel.PRIVATE) +public final class Preconfigured { + private final @NonNull Configurable configurable; + private final @NonNull Map configOverrides; + + private Preconfigured(final Configurable configurable) { + this(configurable, emptyMap()); + } + + /** + * Create a pre-configured {@code Serde} that returns {@code null} when calling + * {@link Preconfigured#configureForKeys(Map)} and {@link Preconfigured#configureForValues(Map)} + * @return pre-configured serde + * @param type (de-)serialized by the {@code Serde} + */ + public static Preconfigured> defaultSerde() { + return new Preconfigured<>(new DefaultConfigurable<>()); + } + + /** + * Pre-configure a {@code Serde} + * @param serde {@code Serde} to pre-configure + * @return pre-configured serde + * @param type of {@link Serde} + * @param type (de-)serialized by the {@code Serde} + */ + public static , T> Preconfigured create(final S serde) { + return new Preconfigured<>(configurable(serde)); + } + + /** + * Pre-configure a {@code Serde} with config overrides + * @param serde {@code Serde} to pre-configure + * @param configOverrides configs passed to {@link Serde#configure(Map, boolean)} + * @return pre-configured serde + * @param type of {@link Serde} + * @param type (de-)serialized by the {@code Serde} + */ + public static , T> Preconfigured create(final S serde, + final Map configOverrides) { + return new Preconfigured<>(configurable(serde), configOverrides); + } + + /** + * Create a pre-configured {@code Serializer} that returns {@code null} when calling + * {@link Preconfigured#configureForKeys(Map)} and {@link Preconfigured#configureForValues(Map)} + * @return pre-configured serializer + * @param type (de-)serialized by the {@code Serializer} + */ + public static Preconfigured> defaultSerializer() { + return new Preconfigured<>(new DefaultConfigurable<>()); + } + + /** + * Pre-configure a {@code Serializer} + * @param serializer {@code Serializer} to pre-configure + * @return pre-configured serializer + * @param type of {@link Serializer} + * @param type serialized by the {@code Serializer} + */ + public static , T> Preconfigured create(final S serializer) { + return new Preconfigured<>(configurable(serializer)); + } + + /** + * Pre-configure a {@code Serializer} + * @param serializer {@code Serializer} to pre-configure + * @param configOverrides configs passed to {@link Serializer#configure(Map, boolean)} + * @return pre-configured serializer + * @param type of {@link Serializer} + * @param type serialized by the {@code Serializer} + */ + public static , T> Preconfigured create(final S serializer, + final Map configOverrides) { + return new Preconfigured<>(configurable(serializer), configOverrides); + } + + private static , T> 
ConfigurableSerde configurable(final S serde) { + return new ConfigurableSerde<>(serde); + } + + private static , T> ConfigurableSerializer configurable(final S serializer) { + return new ConfigurableSerializer<>(serializer); + } + + /** + * Configure for values using a base config + * @param baseConfig Base config. {@link #configOverrides} override properties of base config. + * @return configured instance + */ + public T configureForValues(final Map baseConfig) { + return this.configure(baseConfig, false); + } + + /** + * Configure for keys using a base config + * @param baseConfig Base config. {@link #configOverrides} override properties of base config. + * @return configured instance + */ + public T configureForKeys(final Map baseConfig) { + return this.configure(baseConfig, true); + } + + private T configure(final Map baseConfig, final boolean isKey) { + final Map serializerConfig = this.mergeConfig(baseConfig); + return this.configurable.configure(serializerConfig, isKey); + } + + private Map mergeConfig(final Map baseConfig) { + final Map config = new HashMap<>(baseConfig); + config.putAll(this.configOverrides); + return config; + } + +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerApp.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerApp.java new file mode 100644 index 00000000..42cee6e6 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerApp.java @@ -0,0 +1,49 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
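A small sketch (editorial, not part of this changeset) of the deferred configuration that `Preconfigured` enables; the override key and value are placeholders:

```java
import com.bakdata.kafka.Preconfigured;
import java.util.Map;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;

class PreconfiguredSketch {
    static Serde<String> keySerde(final Map<String, Object> baseConfig) {
        // Capture the serde and its overrides now, apply the base config later
        final Preconfigured<Serde<String>> preconfigured =
                Preconfigured.create(Serdes.String(), Map.of("some.serde.config", "value")); // placeholder override
        return preconfigured.configureForKeys(baseConfig);
    }
}
```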
+ */ + +package com.bakdata.kafka; + +/** + * Application that defines how to produce messages to Kafka and necessary configurations + */ +@FunctionalInterface +public interface ProducerApp extends App { + + /** + * Create a runnable that produces Kafka messages + * @param builder provides all runtime application configurations + * @return {@code ProducerRunnable} + */ + ProducerRunnable buildRunnable(ProducerBuilder builder); + + /** + * @return {@code ProducerCleanUpConfiguration} + * @see ProducerCleanUpRunner + */ + @Override + default ProducerCleanUpConfiguration setupCleanUp( + final EffectiveAppConfiguration configuration) { + return new ProducerCleanUpConfiguration(); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerBuilder.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerBuilder.java new file mode 100644 index 00000000..df01fdad --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerBuilder.java @@ -0,0 +1,90 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import java.util.Map; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.Value; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.common.serialization.Serializer; + +/** + * Provides all runtime configurations when running a {@link ProducerApp} + * + * @see ProducerApp#buildRunnable(ProducerBuilder) + */ +@RequiredArgsConstructor +@Value +public class ProducerBuilder { + + @NonNull + ProducerTopicConfig topics; + @NonNull + Map kafkaProperties; + + /** + * Create a new {@code Producer} using {@link #kafkaProperties} + * @return {@code Producer} + * @param type of keys + * @param type of values + * @see KafkaProducer#KafkaProducer(Map) + */ + public Producer createProducer() { + return new KafkaProducer<>(this.kafkaProperties); + } + + /** + * Create a new {@code Producer} using {@link #kafkaProperties} and provided {@code Serializers} + * @param keySerializer {@code Serializer} to use for keys + * @param valueSerializer {@code Serializer} to use for values + * @return {@code Producer} + * @param type of keys + * @param type of values + * @see KafkaProducer#KafkaProducer(Map, Serializer, Serializer) + */ + public Producer createProducer(final Serializer keySerializer, + final Serializer valueSerializer) { + return new KafkaProducer<>(this.kafkaProperties, keySerializer, valueSerializer); + } + + /** + * Create {@code Configurator} to configure {@link org.apache.kafka.common.serialization.Serde} and + * {@link org.apache.kafka.common.serialization.Serializer} using {@link #kafkaProperties}. + * @return {@code Configurator} + */ + public Configurator createConfigurator() { + return new Configurator(this.kafkaProperties); + } + + /** + * Create {@code EffectiveAppConfiguration} used by this app + * @return {@code EffectiveAppConfiguration} + */ + public EffectiveAppConfiguration createEffectiveConfiguration() { + return new EffectiveAppConfiguration<>(this.topics, this.kafkaProperties); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerCleanUpConfiguration.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerCleanUpConfiguration.java new file mode 100644 index 00000000..aa563091 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerCleanUpConfiguration.java @@ -0,0 +1,64 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import java.util.ArrayList; +import java.util.Collection; +import lombok.NonNull; + +/** + * Provides configuration options for {@link ProducerCleanUpRunner} + */ +public class ProducerCleanUpConfiguration + implements HasTopicHooks, HasCleanHook { + private final @NonNull Collection topicHooks = new ArrayList<>(); + private final @NonNull Collection cleanHooks = new ArrayList<>(); + + /** + * Register a hook that is executed whenever a topic has been deleted by the cleanup runner. + */ + @Override + public ProducerCleanUpConfiguration registerTopicHook(final TopicHook hook) { + this.topicHooks.add(hook); + return this; + } + + /** + * Register an action that is executed after {@link ProducerCleanUpRunner#clean()} has finished + */ + @Override + public ProducerCleanUpConfiguration registerCleanHook(final Runnable hook) { + this.cleanHooks.add(hook); + return this; + } + + void runCleanHooks() { + this.cleanHooks.forEach(Runnable::run); + } + + void runTopicDeletionHooks(final String topic) { + this.topicHooks.forEach(hook -> hook.deleted(topic)); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerCleanUpRunner.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerCleanUpRunner.java new file mode 100644 index 00000000..bfddf6d1 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerCleanUpRunner.java @@ -0,0 +1,114 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
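Finally, a sketch (editorial, not part of this changeset) of registering hooks on the `ProducerCleanUpConfiguration` above; the println calls stand in for real side effects such as removing external state:

```java
import com.bakdata.kafka.HasTopicHooks.TopicHook;
import com.bakdata.kafka.ProducerCleanUpConfiguration;

class CleanUpHooksSketch {
    static ProducerCleanUpConfiguration hooks() {
        return new ProducerCleanUpConfiguration()
                .registerTopicHook(new TopicHook() {
                    @Override
                    public void deleted(final String topic) {
                        // invoked once per output topic deleted by the clean-up runner
                        System.out.println("Deleted topic " + topic);
                    }
                })
                .registerCleanHook(() -> System.out.println("Producer clean up finished"));
    }
}
```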
+ */ + +package com.bakdata.kafka; + +import com.bakdata.kafka.util.ImprovedAdminClient; +import java.util.Map; +import lombok.AccessLevel; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.jooq.lambda.Seq; + + +/** + * Delete all output topics specified by a {@link ProducerTopicConfig} + */ +@Slf4j +@RequiredArgsConstructor(access = AccessLevel.PRIVATE) +public final class ProducerCleanUpRunner implements CleanUpRunner { + private final @NonNull ProducerTopicConfig topics; + private final @NonNull Map kafkaProperties; + private final @NonNull ProducerCleanUpConfiguration cleanHooks; + + /** + * Create a new {@code ProducerCleanUpRunner} with default {@link ProducerCleanUpConfiguration} + * + * @param topics topic configuration to infer output topics that require cleaning + * @param kafkaProperties configuration to connect to Kafka admin tools + * @return {@code ProducerCleanUpRunner} + */ + public static ProducerCleanUpRunner create(@NonNull final ProducerTopicConfig topics, + @NonNull final Map kafkaProperties) { + return create(topics, kafkaProperties, new ProducerCleanUpConfiguration()); + } + + /** + * Create a new {@code ProducerCleanUpRunner} + * + * @param topics topic configuration to infer output topics that require cleaning + * @param kafkaProperties configuration to connect to Kafka admin tools + * @param configuration configuration for hooks that are called when running {@link #clean()} + * @return {@code ProducerCleanUpRunner} + */ + public static ProducerCleanUpRunner create(@NonNull final ProducerTopicConfig topics, + @NonNull final Map kafkaProperties, + @NonNull final ProducerCleanUpConfiguration configuration) { + return new ProducerCleanUpRunner(topics, kafkaProperties, configuration); + } + + /** + * Delete all output topics + */ + @Override + public void clean() { + try (final ImprovedAdminClient adminClient = this.createAdminClient()) { + final Task task = new Task(adminClient); + task.clean(); + } + } + + private ImprovedAdminClient createAdminClient() { + return ImprovedAdminClient.create(this.kafkaProperties); + } + + @RequiredArgsConstructor + private class Task { + + private final @NonNull ImprovedAdminClient adminClient; + + private void clean() { + this.deleteTopics(); + ProducerCleanUpRunner.this.cleanHooks.runCleanHooks(); + } + + private void deleteTopics() { + final Iterable outputTopics = this.getAllOutputTopics(); + outputTopics.forEach(this::deleteTopic); + } + + private void deleteTopic(final String topic) { + this.adminClient.getSchemaTopicClient() + .deleteTopicAndResetSchemaRegistry(topic); + ProducerCleanUpRunner.this.cleanHooks.runTopicDeletionHooks(topic); + } + + private Iterable getAllOutputTopics() { + return Seq.of(ProducerCleanUpRunner.this.topics.getOutputTopic()) + .concat(ProducerCleanUpRunner.this.topics.getExtraOutputTopics().values()); + } + } + +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerExecutionOptions.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerExecutionOptions.java new file mode 100644 index 00000000..95328e2f --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerExecutionOptions.java @@ -0,0 +1,34 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without 
limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import lombok.Builder; + +/** + * Options to run a Kafka Producer app using {@link ProducerRunner} + */ +@Builder +public final class ProducerExecutionOptions { +} diff --git a/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageKafkaProducerApplication.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerRunnable.java similarity index 73% rename from streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageKafkaProducerApplication.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerRunnable.java index 2fea1acf..75588546 100644 --- a/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageKafkaProducerApplication.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerRunnable.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,16 +24,19 @@ package com.bakdata.kafka; -import java.util.function.Consumer; - /** - * Kafka Producer Application that automatically removes files associated with {@code LargeMessageSerializer} + * Produce messages to Kafka */ -public abstract class LargeMessageKafkaProducerApplication extends KafkaProducerApplication { +@FunctionalInterface +public interface ProducerRunnable extends AutoCloseable { + + /** + * Produce messages to Kafka + */ + void run(); @Override - protected Consumer createTopicCleanUpHook() { - return LargeMessageKafkaApplicationUtils.createLargeMessageCleanUpHook(this); + default void close() { + // do nothing by default } - } diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerRunner.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerRunner.java new file mode 100644 index 00000000..2c55a987 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerRunner.java @@ -0,0 +1,51 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be 
included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** + * Runs a Kafka Producer application + */ +@RequiredArgsConstructor +@Slf4j +public class ProducerRunner implements Runner { + + private final @NonNull ProducerRunnable runnable; + + @Override + public void close() { + log.info("Closing producer"); + this.runnable.close(); + } + + @Override + public void run() { + log.info("Starting producer"); + this.runnable.run(); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerTopicConfig.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerTopicConfig.java new file mode 100644 index 00000000..80d84c5a --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/ProducerTopicConfig.java @@ -0,0 +1,62 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import static java.util.Collections.emptyMap; + +import com.google.common.base.Preconditions; +import java.util.Map; +import lombok.Builder; +import lombok.EqualsAndHashCode; +import lombok.NonNull; +import lombok.Value; + +/** + * Provides topic configuration for a {@link ProducerApp} + */ +@Builder +@Value +@EqualsAndHashCode +public class ProducerTopicConfig { + + String outputTopic; + /** + * Extra output topics that are identified by a role + */ + @Builder.Default + @NonNull Map extraOutputTopics = emptyMap(); + + /** + * Get extra output topic for a specified role + * + * @param role role of extra output topic + * @return topic name + */ + public String getOutputTopic(final String role) { + final String topic = this.extraOutputTopics.get(role); + Preconditions.checkNotNull(topic, "No output topic for role '%s' available", role); + return topic; + } +} diff --git a/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageKafkaStreamsApplication.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Runner.java similarity index 72% rename from streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageKafkaStreamsApplication.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/Runner.java index 18b43bf6..e3d47a5c 100644 --- a/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageKafkaStreamsApplication.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/Runner.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -25,14 +25,16 @@ package com.bakdata.kafka; /** - * Kafka Streams Application that automatically removes files associated with {@code LargeMessageSerde} + * Interface for running applications */ -public abstract class LargeMessageKafkaStreamsApplication extends KafkaStreamsApplication { +public interface Runner extends AutoCloseable, Runnable { @Override - protected void cleanUpRun(final CleanUpRunner cleanUpRunner) { - LargeMessageKafkaApplicationUtils.registerLargeMessageCleanUpHook(this, cleanUpRunner); - super.cleanUpRun(cleanUpRunner); - } + void close(); + /** + * Run the application + */ + @Override + void run(); } diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/RunningStreams.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/RunningStreams.java new file mode 100644 index 00000000..c06c69d9 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/RunningStreams.java @@ -0,0 +1,52 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import com.bakdata.kafka.StreamsExecutionOptions.StreamsExecutionOptionsBuilder; +import java.util.function.Consumer; +import lombok.Builder; +import lombok.NonNull; +import lombok.Value; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.Topology; + +/** + * A running {@link KafkaStreams} instance along with its {@link StreamsConfig} and + * {@link org.apache.kafka.streams.Topology} + * + * @see StreamsExecutionOptionsBuilder#onStart(Consumer) + */ +@Builder +@Value +public class RunningStreams { + + @NonNull + ImprovedStreamsConfig config; + @NonNull + Topology topology; + @NonNull + KafkaStreams streams; +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsApp.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsApp.java new file mode 100644 index 00000000..0802720a --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsApp.java @@ -0,0 +1,58 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +/** + * Application that defines a Kafka Streams {@link org.apache.kafka.streams.Topology} and necessary configurations + */ +public interface StreamsApp extends App { + + /** + * Build the Kafka Streams {@link org.apache.kafka.streams.Topology} to be run by the app. + * + * @param builder provides all runtime application configurations and supports building the + * {@link org.apache.kafka.streams.Topology} + */ + void buildTopology(TopologyBuilder builder); + + /** + * This must be set to a unique value for every application interacting with your Kafka cluster to ensure internal + * state encapsulation. 
Could be set to: className-outputTopic + * + * @param topics provides runtime topic configuration + * @return unique application identifier + */ + String getUniqueAppId(StreamsTopicConfig topics); + + /** + * @return {@code StreamsCleanUpConfiguration} + * @see StreamsCleanUpRunner + */ + @Override + default StreamsCleanUpConfiguration setupCleanUp( + final EffectiveAppConfiguration configuration) { + return new StreamsCleanUpConfiguration(); + } +} diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/StreamsApplicationException.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsApplicationException.java similarity index 98% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/StreamsApplicationException.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsApplicationException.java index 6073e048..058bddcb 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/StreamsApplicationException.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsApplicationException.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsCleanUpConfiguration.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsCleanUpConfiguration.java new file mode 100644 index 00000000..c9186936 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsCleanUpConfiguration.java @@ -0,0 +1,79 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import java.util.ArrayList; +import java.util.Collection; +import lombok.NonNull; + +/** + * Provides configuration options for {@link StreamsCleanUpRunner} + */ +public class StreamsCleanUpConfiguration + implements HasTopicHooks, HasCleanHook { + private final @NonNull Collection topicHooks = new ArrayList<>(); + private final @NonNull Collection cleanHooks = new ArrayList<>(); + private final @NonNull Collection resetHooks = new ArrayList<>(); + + /** + * Register a hook that is executed whenever a topic has been deleted by the cleanup runner. 
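+     * <p>
+     * For illustration only, a minimal sketch of registering hooks; the hook bodies are placeholders and it is
+     * assumed that {@code TopicHook} only requires implementing {@code deleted(String)}:
+     * <pre>{@code
+     * new StreamsCleanUpConfiguration()
+     *         .registerTopicHook(new TopicHook() {
+     *             public void deleted(final String topic) {
+     *                 // e.g. remove external resources derived from the deleted topic
+     *             }
+     *         })
+     *         .registerCleanHook(() -> System.out.println("clean finished"));
+     * }</pre>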
+ */ + @Override + public StreamsCleanUpConfiguration registerTopicHook(final TopicHook hook) { + this.topicHooks.add(hook); + return this; + } + + /** + * Register a hook that is executed after {@link StreamsCleanUpRunner#clean()} has finished + */ + @Override + public StreamsCleanUpConfiguration registerCleanHook(final Runnable hook) { + this.cleanHooks.add(hook); + return this; + } + + /** + * Register a hook that is executed after {@link StreamsCleanUpRunner#reset()} has finished + * @param hook factory to create hook from + * @return self for chaining + */ + public StreamsCleanUpConfiguration registerResetHook(final Runnable hook) { + this.resetHooks.add(hook); + return this; + } + + void runCleanHooks() { + this.cleanHooks.forEach(Runnable::run); + } + + void runResetHooks() { + this.resetHooks.forEach(Runnable::run); + } + + void runTopicDeletionHooks(final String topic) { + this.topicHooks.forEach(hook -> hook.deleted(topic)); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsCleanUpRunner.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsCleanUpRunner.java new file mode 100644 index 00000000..d1a47beb --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsCleanUpRunner.java @@ -0,0 +1,256 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import com.bakdata.kafka.util.ConsumerGroupClient; +import com.bakdata.kafka.util.ImprovedAdminClient; +import com.bakdata.kafka.util.TopologyInformation; +import com.google.common.collect.ImmutableList; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.stream.Collectors; +import lombok.AccessLevel; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.Topology; +import org.apache.kafka.tools.StreamsResetter; + + +/** + * Clean up the state and artifacts of your Kafka Streams app + */ +@Slf4j +@RequiredArgsConstructor(access = AccessLevel.PRIVATE) +public final class StreamsCleanUpRunner implements CleanUpRunner { + private static final int EXIT_CODE_SUCCESS = 0; + private final TopologyInformation topologyInformation; + private final Topology topology; + private final @NonNull ImprovedStreamsConfig config; + private final @NonNull StreamsCleanUpConfiguration cleanHooks; + + /** + * Create a new {@code StreamsCleanUpRunner} with default {@link StreamsCleanUpConfiguration} + * + * @param topology topology defining the Kafka Streams app + * @param streamsConfig configuration to run topology and connect to Kafka admin tools + * @return {@code StreamsCleanUpRunner} + */ + public static StreamsCleanUpRunner create(final @NonNull Topology topology, + final @NonNull StreamsConfig streamsConfig) { + return create(topology, streamsConfig, new StreamsCleanUpConfiguration()); + } + + /** + * Create a new {@code StreamsCleanUpRunner} + * + * @param topology topology defining the Kafka Streams app + * @param streamsConfig configuration to run topology and connect to Kafka admin tools + * @param configuration configuration for hooks that are called when running {@link #clean()} and {@link #reset()} + * @return {@code StreamsCleanUpRunner} + */ + public static StreamsCleanUpRunner create(final @NonNull Topology topology, + final @NonNull StreamsConfig streamsConfig, final @NonNull StreamsCleanUpConfiguration configuration) { + final ImprovedStreamsConfig config = new ImprovedStreamsConfig(streamsConfig); + final TopologyInformation topologyInformation = new TopologyInformation(topology, config.getAppId()); + return new StreamsCleanUpRunner(topologyInformation, topology, config, configuration); + } + + /** + * Run the
Kafka + * Streams Reset Tool + * + * @param inputTopics list of input topics of the streams app + * @param intermediateTopics list of intermediate topics of the streams app + * @param allTopics list of all topics that exist in the Kafka cluster + * @param streamsAppConfig configuration properties of the streams app + */ + public static void runResetter(final Collection<String> inputTopics, final Collection<String> intermediateTopics, + final Collection<String> allTopics, final ImprovedStreamsConfig streamsAppConfig) { + // StreamsResetter's internal AdminClient can only be configured with a properties file + final String appId = streamsAppConfig.getAppId(); + final File tempFile = createTemporaryPropertiesFile(appId, streamsAppConfig.getKafkaProperties()); + final ImmutableList.Builder<String> argList = ImmutableList.builder() + .add("--application-id", appId) + .add("--bootstrap-server", String.join(",", streamsAppConfig.getBoostrapServers())) + .add("--config-file", tempFile.toString()); + final Collection<String> existingInputTopics = filterExistingTopics(inputTopics, allTopics); + if (!existingInputTopics.isEmpty()) { + argList.add("--input-topics", String.join(",", existingInputTopics)); + } + final Collection<String> existingIntermediateTopics = filterExistingTopics(intermediateTopics, allTopics); + if (!existingIntermediateTopics.isEmpty()) { + argList.add("--intermediate-topics", String.join(",", existingIntermediateTopics)); + } + final String[] args = argList.build().toArray(String[]::new); + final StreamsResetter resetter = new StreamsResetter(); + final int returnCode = resetter.execute(args); + try { + Files.delete(tempFile.toPath()); + } catch (final IOException e) { + log.warn("Error deleting temporary property file", e); + } + if (returnCode != EXIT_CODE_SUCCESS) { + throw new CleanUpException("Error running streams resetter. Exit code " + returnCode); + } + } + + static File createTemporaryPropertiesFile(final String appId, final Map<String, Object> config) { + // Writing properties requires Map<String, String> + final Properties parsedProperties = toStringBasedProperties(config); + try { + final File tempFile = File.createTempFile(appId + "-reset", "temp"); + try (final FileOutputStream out = new FileOutputStream(tempFile)) { + parsedProperties.store(out, ""); + } + return tempFile; + } catch (final IOException e) { + throw new CleanUpException("Could not run StreamsResetter", e); + } + } + + static Properties toStringBasedProperties(final Map<String, Object> config) { + final Properties parsedProperties = new Properties(); + config.forEach((key, value) -> parsedProperties.setProperty(key, value.toString())); + return parsedProperties; + } + + private static Collection<String> filterExistingTopics(final Collection<String> topics, + final Collection<String> allTopics) { + return topics.stream() + .filter(topicName -> { + final boolean exists = allTopics.contains(topicName); + if (!exists) { + log.warn("Not resetting missing topic {}", topicName); + } + return exists; + }) + .collect(Collectors.toList()); + } + + /** + * Clean up your Streams app by resetting the app and deleting the output topics + * and consumer group. + * @see #reset() + */ + @Override + public void clean() { + try (final ImprovedAdminClient adminClient = this.createAdminClient()) { + final Task task = new Task(adminClient); + task.cleanAndReset(); + } + } + + /** + * Clean up your Streams app by resetting all state stores, consumer group offsets, and internal topics, deleting + * local state.
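+     * <p>
+     * A minimal usage sketch (assuming a {@code topology} and {@code streamsConfig} have already been built by the
+     * surrounding application):
+     * <pre>{@code
+     * StreamsCleanUpRunner runner = StreamsCleanUpRunner.create(topology, streamsConfig);
+     * runner.reset(); // reprocess all input from scratch; output topics are left untouched
+     * }</pre>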
+ */ + public void reset() { + try (final ImprovedAdminClient adminClient = this.createAdminClient()) { + final Task task = new Task(adminClient); + task.reset(); + } + } + + private Map getKafkaProperties() { + return this.config.getKafkaProperties(); + } + + private ImprovedAdminClient createAdminClient() { + return ImprovedAdminClient.create(this.getKafkaProperties()); + } + + @RequiredArgsConstructor + private class Task { + + private final @NonNull ImprovedAdminClient adminClient; + + private void reset() { + final Collection allTopics = this.adminClient.getTopicClient().listTopics(); + final List inputTopics = + StreamsCleanUpRunner.this.topologyInformation.getExternalSourceTopics(allTopics); + final List intermediateTopics = + StreamsCleanUpRunner.this.topologyInformation.getIntermediateTopics(allTopics); + runResetter(inputTopics, intermediateTopics, allTopics, StreamsCleanUpRunner.this.config); + // the StreamsResetter is responsible for deleting internal topics + StreamsCleanUpRunner.this.topologyInformation.getInternalTopics() + .forEach(this::resetInternalTopic); + try (final KafkaStreams kafkaStreams = this.createStreams()) { + kafkaStreams.cleanUp(); + } + StreamsCleanUpRunner.this.cleanHooks.runResetHooks(); + } + + private KafkaStreams createStreams() { + return new KafkaStreams(StreamsCleanUpRunner.this.topology, + new StreamsConfig(StreamsCleanUpRunner.this.getKafkaProperties())); + } + + private void cleanAndReset() { + this.reset(); + this.clean(); + } + + private void clean() { + this.deleteTopics(); + this.deleteConsumerGroup(); + StreamsCleanUpRunner.this.cleanHooks.runCleanHooks(); + } + + /** + * Delete output topics + */ + private void deleteTopics() { + final List externalTopics = StreamsCleanUpRunner.this.topologyInformation.getExternalSinkTopics(); + externalTopics.forEach(this::deleteTopic); + } + + private void resetInternalTopic(final String topic) { + this.adminClient.getSchemaTopicClient() + .resetSchemaRegistry(topic); + StreamsCleanUpRunner.this.cleanHooks.runTopicDeletionHooks(topic); + } + + private void deleteTopic(final String topic) { + this.adminClient.getSchemaTopicClient() + .deleteTopicAndResetSchemaRegistry(topic); + StreamsCleanUpRunner.this.cleanHooks.runTopicDeletionHooks(topic); + } + + private void deleteConsumerGroup() { + final ConsumerGroupClient consumerGroupClient = this.adminClient.getConsumerGroupClient(); + consumerGroupClient.deleteGroupIfExists(StreamsCleanUpRunner.this.config.getAppId()); + } + } + +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsExecutionOptions.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsExecutionOptions.java new file mode 100644 index 00000000..92e482d9 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsExecutionOptions.java @@ -0,0 +1,100 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import com.google.common.annotations.VisibleForTesting; +import java.time.Duration; +import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Supplier; +import lombok.Builder; +import lombok.NonNull; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.KafkaStreams.CloseOptions; +import org.apache.kafka.streams.KafkaStreams.StateListener; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; + +/** + * Options to run a Kafka Streams app using {@link StreamsRunner} + */ +@Builder +public class StreamsExecutionOptions { + /** + * Hook that is called after calling {@link KafkaStreams#start()} + */ + @Builder.Default + private final @NonNull Consumer<RunningStreams> onStart = runningStreams -> {}; + /** + * Configures {@link KafkaStreams#setStateListener(StateListener)} + */ + @Builder.Default + private final @NonNull Supplier<StateListener> stateListener = NoOpStateListener::new; + /** + * Configures {@link KafkaStreams#setUncaughtExceptionHandler(StreamsUncaughtExceptionHandler)} + */ + @Builder.Default + private final @NonNull Supplier<StreamsUncaughtExceptionHandler> uncaughtExceptionHandler = + DefaultStreamsUncaughtExceptionHandler::new; + /** + * Defines if {@link ConsumerConfig#GROUP_INSTANCE_ID_CONFIG} is volatile.
If it is configured and non-volatile, + * {@link KafkaStreams#close(CloseOptions)} is called with {@link CloseOptions#leaveGroup(boolean)} disabled + */ + @Builder.Default + private final boolean volatileGroupInstanceId = true; + /** + * Defines {@link CloseOptions#timeout(Duration)} when calling {@link KafkaStreams#close(CloseOptions)} + */ + @Builder.Default + private final Duration closeTimeout = Duration.ofMillis(Long.MAX_VALUE); + + private static boolean isStaticMembershipDisabled(final Map originals) { + return originals.get(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG) == null; + } + + CloseOptions createCloseOptions(final StreamsConfig config) { + final boolean leaveGroup = this.shouldLeaveGroup(config.originals()); + return new CloseOptions().leaveGroup(leaveGroup).timeout(this.closeTimeout); + } + + @VisibleForTesting + boolean shouldLeaveGroup(final Map originals) { + final boolean staticMembershipDisabled = isStaticMembershipDisabled(originals); + return staticMembershipDisabled || this.volatileGroupInstanceId; + } + + void onStart(final RunningStreams runningStreams) { + this.onStart.accept(runningStreams); + } + + StreamsUncaughtExceptionHandler createUncaughtExceptionHandler() { + return this.uncaughtExceptionHandler.get(); + } + + StateListener createStateListener() { + return this.stateListener.get(); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsRunner.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsRunner.java new file mode 100644 index 00000000..1f9cf18a --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsRunner.java @@ -0,0 +1,131 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.KafkaStreams.CloseOptions; +import org.apache.kafka.streams.KafkaStreams.State; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.Topology; + +/** + * Runs a Kafka Streams application + */ +@Slf4j +public final class StreamsRunner implements Runner { + + private final @NonNull ImprovedStreamsConfig config; + private final @NonNull Topology topology; + private final @NonNull KafkaStreams streams; + private final @NonNull CapturingStreamsUncaughtExceptionHandler exceptionHandler; + private final @NonNull StreamsShutdownStateListener shutdownListener; + private final @NonNull CloseOptions closeOptions; + private final @NonNull StreamsExecutionOptions executionOptions; + + /** + * Create a {@code StreamsRunner} with default {@link StreamsExecutionOptions} + * @param topology topology to be executed + * @param config streams configuration + */ + public StreamsRunner(final @NonNull Topology topology, final @NonNull StreamsConfig config) { + this(topology, config, StreamsExecutionOptions.builder().build()); + } + + /** + * Create a {@code StreamsRunner} + * @param topology topology to be executed + * @param config streams configuration + * @param options options to customize {@link KafkaStreams} behavior + */ + public StreamsRunner(final @NonNull Topology topology, final @NonNull StreamsConfig config, + final @NonNull StreamsExecutionOptions options) { + this.config = new ImprovedStreamsConfig(config); + this.topology = topology; + this.streams = new KafkaStreams(topology, config); + this.exceptionHandler = new CapturingStreamsUncaughtExceptionHandler(options.createUncaughtExceptionHandler()); + this.streams.setUncaughtExceptionHandler(this.exceptionHandler); + this.shutdownListener = new StreamsShutdownStateListener(options.createStateListener()); + this.streams.setStateListener(this.shutdownListener); + this.closeOptions = options.createCloseOptions(config); + this.executionOptions = options; + } + + /** + * Run the Streams application. This method blocks until Kafka Streams has completed shutdown, either because it + * caught an error or {@link #close()} has been called. 
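+     * <p>
+     * A minimal usage sketch (assuming {@code topology} and {@code config} are provided by the surrounding
+     * application):
+     * <pre>{@code
+     * StreamsRunner runner = new StreamsRunner(topology, config);
+     * Runtime.getRuntime().addShutdownHook(new Thread(runner::close));
+     * runner.run(); // blocks until Kafka Streams has shut down
+     * }</pre>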
+ */ + @Override + public void run() { + this.runStreams(); + this.awaitStreamsShutdown(); + this.checkErrors(); + } + + @Override + public void close() { + log.info("Closing Kafka Streams"); + final boolean success = this.streams.close(this.closeOptions); + if (success) { + log.info("Successfully closed Kafka Streams"); + } else { + log.info("Timed out closing Kafka Streams"); + } + } + + private void checkErrors() { + if (this.hasErrored()) { + this.exceptionHandler.throwException(); + } + } + + private boolean hasErrored() { + return this.streams.state() == State.ERROR; + } + + private void runStreams() { + log.info("Starting Kafka Streams"); + log.debug("Streams topology:\n{}", this.topology.describe()); + this.streams.start(); + log.debug("Calling start hook"); + final RunningStreams runningStreams = RunningStreams.builder() + .streams(this.streams) + .config(this.config) + .topology(this.topology) + .build(); + this.executionOptions.onStart(runningStreams); + } + + private void awaitStreamsShutdown() { + try { + this.shutdownListener.await(); + } catch (final InterruptedException e) { + Thread.currentThread().interrupt(); + throw new StreamsApplicationException("Error awaiting Streams shutdown", e); + } + } + +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsShutdownStateListener.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsShutdownStateListener.java new file mode 100644 index 00000000..7b4c13e0 --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsShutdownStateListener.java @@ -0,0 +1,50 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import java.util.concurrent.CountDownLatch; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import org.apache.kafka.streams.KafkaStreams.State; +import org.apache.kafka.streams.KafkaStreams.StateListener; + +@RequiredArgsConstructor +class StreamsShutdownStateListener implements StateListener { + + private final CountDownLatch streamsShutdown = new CountDownLatch(1); + private @NonNull StateListener wrapped; + + @Override + public void onChange(final State newState, final State oldState) { + this.wrapped.onChange(newState, oldState); + if (newState.hasCompletedShutdown()) { + this.streamsShutdown.countDown(); + } + } + + void await() throws InterruptedException { + this.streamsShutdown.await(); + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsTopicConfig.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsTopicConfig.java new file mode 100644 index 00000000..4aac95fa --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/StreamsTopicConfig.java @@ -0,0 +1,103 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; + +import com.google.common.base.Preconditions; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; +import lombok.Builder; +import lombok.EqualsAndHashCode; +import lombok.NonNull; +import lombok.Value; + +/** + * Provides topic configuration for a {@link StreamsApp} + */ +@Builder +@Value +@EqualsAndHashCode +public class StreamsTopicConfig { + + @Builder.Default + @NonNull List<String> inputTopics = emptyList(); + /** + * Extra input topics that are identified by a role + */ + @Builder.Default + @NonNull Map<String, List<String>> extraInputTopics = emptyMap(); + Pattern inputPattern; + /** + * Extra input patterns that are identified by a role + */ + @Builder.Default + @NonNull Map<String, Pattern> extraInputPatterns = emptyMap(); + String outputTopic; + /** + * Extra output topics that are identified by a role + */ + @Builder.Default + @NonNull Map<String, String> extraOutputTopics = emptyMap(); + String errorTopic; + + /** + * Get extra input topics for a specified role + * + * @param role role of extra input topics + * @return topic names + */ + public List<String> getInputTopics(final String role) { + final List<String> topics = this.extraInputTopics.get(role); + Preconditions.checkNotNull(topics, "No input topics for role '%s' available", role); + return topics; + } + + /** + * Get extra input pattern for a specified role + * + * @param role role of extra input pattern + * @return topic pattern + */ + public Pattern getInputPattern(final String role) { + final Pattern pattern = this.extraInputPatterns.get(role); + Preconditions.checkNotNull(pattern, "No input pattern for role '%s' available", role); + return pattern; + } + + /** + * Get extra output topic for a specified role + * + * @param role role of extra output topic + * @return topic name + */ + public String getOutputTopic(final String role) { + final String topic = this.extraOutputTopics.get(role); + Preconditions.checkNotNull(topic, "No output topic for role '%s' available", role); + return topic; + } +} diff --git a/streams-bootstrap-core/src/main/java/com/bakdata/kafka/TopologyBuilder.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/TopologyBuilder.java new file mode 100644 index 00000000..ba8992df --- /dev/null +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/TopologyBuilder.java @@ -0,0 +1,159 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import java.util.Map; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.Value; +import org.apache.kafka.streams.StreamsBuilder; +import org.apache.kafka.streams.Topology; +import org.apache.kafka.streams.kstream.Consumed; +import org.apache.kafka.streams.kstream.KStream; + +/** + * Provides all runtime configurations and supports building a {@link Topology} of a {@link StreamsApp} + * + * @see StreamsApp#buildTopology(TopologyBuilder) + */ +@RequiredArgsConstructor +@Value +public class TopologyBuilder { + + StreamsBuilder streamsBuilder = new StreamsBuilder(); + @NonNull + StreamsTopicConfig topics; + @NonNull + Map<String, Object> kafkaProperties; + + /** + * Create a {@code KStream} from all {@link StreamsTopicConfig#getInputTopics()} + * @param consumed define optional parameters for streaming topics + * @return a {@code KStream} for all {@link StreamsTopicConfig#getInputTopics()} + * @param <K> type of keys + * @param <V> type of values + */ + public <K, V> KStream<K, V> streamInput(final Consumed<K, V> consumed) { + return this.streamsBuilder.stream(this.topics.getInputTopics(), consumed); + } + + /** + * Create a {@code KStream} from all {@link StreamsTopicConfig#getInputTopics()} + * @return a {@code KStream} for all {@link StreamsTopicConfig#getInputTopics()} + * @param <K> type of keys + * @param <V> type of values + */ + public <K, V> KStream<K, V> streamInput() { + return this.streamsBuilder.stream(this.topics.getInputTopics()); + } + + /** + * Create a {@code KStream} from all {@link StreamsTopicConfig#getInputTopics(String)} + * @param role role of extra input topics + * @param consumed define optional parameters for streaming topics + * @return a {@code KStream} for all {@link StreamsTopicConfig#getInputTopics(String)} + * @param <K> type of keys + * @param <V> type of values + */ + public <K, V> KStream<K, V> streamInput(final String role, final Consumed<K, V> consumed) { + return this.streamsBuilder.stream(this.topics.getInputTopics(role), consumed); + } + + /** + * Create a {@code KStream} from all {@link StreamsTopicConfig#getInputTopics(String)} + * @param role role of extra input topics + * @return a {@code KStream} for all {@link StreamsTopicConfig#getInputTopics(String)} + * @param <K> type of keys + * @param <V> type of values + */ + public <K, V> KStream<K, V> streamInput(final String role) { + return this.streamsBuilder.stream(this.topics.getInputTopics(role)); + } + + /** + * Create a {@code KStream} from all topics matching {@link StreamsTopicConfig#getInputPattern()} + * @param consumed define optional parameters for streaming topics + * @return a {@code KStream} for all topics matching {@link StreamsTopicConfig#getInputPattern()} + * @param <K> type of keys + * @param <V> type of values + */ + public <K, V> KStream<K, V> streamInputPattern(final Consumed<K, V> consumed) { + return this.streamsBuilder.stream(this.topics.getInputPattern(), consumed); + } + + /** + * Create a {@code KStream} from all topics matching {@link StreamsTopicConfig#getInputPattern()} + * @return a {@code KStream} for all topics matching {@link StreamsTopicConfig#getInputPattern()} + * @param <K> type of keys + * @param <V> type of values + */ + public <K, V> KStream<K, V> streamInputPattern() { + return this.streamsBuilder.stream(this.topics.getInputPattern()); + } + + /** + * Create a {@code KStream} from
all topics matching {@link StreamsTopicConfig#getInputPattern(String)} + * @param role role of extra input pattern + * @param consumed define optional parameters for streaming topics + * @return a {@code KStream} for all topics matching {@link StreamsTopicConfig#getInputPattern(String)} + * @param type of keys + * @param type of values + */ + public KStream streamInputPattern(final String role, final Consumed consumed) { + return this.streamsBuilder.stream(this.topics.getInputPattern(role), consumed); + } + + /** + * Create a {@code KStream} from all topics matching {@link StreamsTopicConfig#getInputPattern(String)} + * @param role role of extra input pattern + * @return a {@code KStream} for all topics matching {@link StreamsTopicConfig#getInputPattern(String)} + * @param type of keys + * @param type of values + */ + public KStream streamInputPattern(final String role) { + return this.streamsBuilder.stream(this.topics.getInputPattern(role)); + } + + /** + * Create {@code Configurator} to configure {@link org.apache.kafka.common.serialization.Serde} and + * {@link org.apache.kafka.common.serialization.Serializer} using {@link #kafkaProperties}. + * @return {@code Configurator} + */ + public Configurator createConfigurator() { + return new Configurator(this.kafkaProperties); + } + + /** + * Create {@code EffectiveAppConfiguration} used by this app + * @return {@code EffectiveAppConfiguration} + */ + public EffectiveAppConfiguration createEffectiveConfiguration() { + return new EffectiveAppConfiguration<>(this.topics, this.kafkaProperties); + } + + Topology build() { + return this.streamsBuilder.build(); + } +} diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/ConsumerGroupClient.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/ConsumerGroupClient.java similarity index 90% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/util/ConsumerGroupClient.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/ConsumerGroupClient.java index d68c27a0..a813785e 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/ConsumerGroupClient.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/ConsumerGroupClient.java @@ -29,7 +29,6 @@ import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; @@ -61,17 +60,6 @@ public static ConsumerGroupClient create(final Map configs, fina return new ConsumerGroupClient(AdminClient.create(configs), timeout); } - /** - * Creates a new {@code ConsumerGroupClient} using the specified configuration. - * - * @param configs properties passed to {@link AdminClient#create(Properties)} - * @param timeout timeout for waiting for Kafka admin calls - * @return {@code ConsumerGroupClient} - */ - public static ConsumerGroupClient create(final Properties configs, final Duration timeout) { - return new ConsumerGroupClient(AdminClient.create(configs), timeout); - } - /** * Delete a consumer group. 
* diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/DirectTopicSubscription.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/DirectTopicSubscription.java similarity index 98% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/util/DirectTopicSubscription.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/DirectTopicSubscription.java index 7c7a0c18..06e4e150 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/DirectTopicSubscription.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/DirectTopicSubscription.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/ImprovedAdminClient.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/ImprovedAdminClient.java similarity index 73% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/util/ImprovedAdminClient.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/ImprovedAdminClient.java index 7772a83e..167d48b2 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/ImprovedAdminClient.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/ImprovedAdminClient.java @@ -28,14 +28,15 @@ import com.google.common.base.Preconditions; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; +import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; import java.io.Closeable; import java.io.IOException; import java.io.UncheckedIOException; import java.time.Duration; +import java.util.Map; import java.util.Optional; -import java.util.Properties; +import lombok.AccessLevel; import lombok.Builder; -import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.experimental.Delegate; @@ -46,24 +47,43 @@ /** * Provide methods for common operations when performing administrative actions on a Kafka cluster */ +@Builder(access = AccessLevel.PRIVATE) public final class ImprovedAdminClient implements Closeable { - @Getter - private final @NonNull Properties properties; + private static final Duration ADMIN_TIMEOUT = Duration.ofSeconds(10L); private final @NonNull Admin adminClient; private final SchemaRegistryClient schemaRegistryClient; private final @NonNull Duration timeout; - @Builder - private ImprovedAdminClient(@NonNull final Properties properties, - final String schemaRegistryUrl, @NonNull final Duration timeout) { - Preconditions.checkNotNull(properties.getProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG), + /** + * Create a new admin client with default timeout + * @param properties Kafka configuration + * @return admin client + */ + public static ImprovedAdminClient create(@NonNull final Map properties) { + return create(properties, ADMIN_TIMEOUT); + } + + /** + * Create a new admin client + * @param properties Kafka configuration + * @param timeout timeout when performing admin operations + * @return admin client + */ + public static ImprovedAdminClient create(@NonNull final Map properties, + @NonNull final Duration timeout) { + Preconditions.checkNotNull(properties.get(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG), "%s must be specified in properties", AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG); - this.properties = new Properties(properties); - 
this.adminClient = AdminClient.create(properties); - this.schemaRegistryClient = - schemaRegistryUrl == null ? null : createSchemaRegistryClient(this.properties, schemaRegistryUrl); - this.timeout = timeout; + final Admin adminClient = AdminClient.create(properties); + final String schemaRegistryUrl = + (String) properties.get(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG); + final SchemaRegistryClient schemaRegistryClient = + schemaRegistryUrl == null ? null : createSchemaRegistryClient(properties, schemaRegistryUrl); + return builder() + .adminClient(adminClient) + .schemaRegistryClient(schemaRegistryClient) + .timeout(timeout) + .build(); } public Admin getAdminClient() { @@ -87,10 +107,6 @@ public ConsumerGroupClient getConsumerGroupClient() { return new ConsumerGroupClient(this.getAdminClient(), this.timeout); } - public String getBootstrapServers() { - return this.properties.getProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG); - } - @Override public void close() { this.adminClient.close(); diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/KafkaAdminException.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/KafkaAdminException.java similarity index 97% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/util/KafkaAdminException.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/KafkaAdminException.java index f363b76e..b1a5d6f9 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/KafkaAdminException.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/KafkaAdminException.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/PatternTopicSubscription.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/PatternTopicSubscription.java similarity index 98% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/util/PatternTopicSubscription.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/PatternTopicSubscription.java index 7fd0286f..5de2db60 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/PatternTopicSubscription.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/PatternTopicSubscription.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/SchemaTopicClient.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/SchemaTopicClient.java similarity index 91% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/util/SchemaTopicClient.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/SchemaTopicClient.java index 8bfd8836..f7474442 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/SchemaTopicClient.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/SchemaTopicClient.java @@ -33,10 +33,8 @@ import java.io.UncheckedIOException; import java.time.Duration; import java.util.Collection; -import java.util.HashMap; import java.util.List; import java.util.Map; -import 
java.util.Properties; import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -55,12 +53,12 @@ public final class SchemaTopicClient implements Closeable { /** * Creates a new {@code SchemaTopicClient} using the specified configuration. * - * @param configs properties passed to {@link AdminClient#create(Properties)} + * @param configs properties passed to {@link AdminClient#create(Map)} * @param schemaRegistryUrl URL of schema registry * @param timeout timeout for waiting for Kafka admin calls * @return {@code SchemaTopicClient} */ - public static SchemaTopicClient create(final Properties configs, final String schemaRegistryUrl, + public static SchemaTopicClient create(final Map configs, final String schemaRegistryUrl, final Duration timeout) { final SchemaRegistryClient schemaRegistryClient = createSchemaRegistryClient(configs, schemaRegistryUrl); @@ -71,11 +69,11 @@ public static SchemaTopicClient create(final Properties configs, final String sc /** * Creates a new {@code SchemaTopicClient} with no {@link SchemaRegistryClient} using the specified configuration. * - * @param configs properties passed to {@link AdminClient#create(Properties)} + * @param configs properties passed to {@link AdminClient#create(Map)} * @param timeout timeout for waiting for Kafka admin calls * @return {@code SchemaTopicClient} */ - public static SchemaTopicClient create(final Properties configs, final Duration timeout) { + public static SchemaTopicClient create(final Map configs, final Duration timeout) { final TopicClient topicClient = TopicClient.create(configs, timeout); return new SchemaTopicClient(topicClient, null); } @@ -88,11 +86,9 @@ public static SchemaTopicClient create(final Properties configs, final Duration * @param schemaRegistryUrl URL of schema registry * @return {@link SchemaRegistryClient} */ - public static SchemaRegistryClient createSchemaRegistryClient(@NonNull final Map configs, + public static SchemaRegistryClient createSchemaRegistryClient(@NonNull final Map configs, @NonNull final String schemaRegistryUrl) { - final Map originals = new HashMap<>(); - configs.forEach((key, value) -> originals.put(key.toString(), value)); - return SchemaRegistryClientFactory.newClient(List.of(schemaRegistryUrl), CACHE_CAPACITY, null, originals, null); + return SchemaRegistryClientFactory.newClient(List.of(schemaRegistryUrl), CACHE_CAPACITY, null, configs, null); } /** diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopicClient.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopicClient.java similarity index 95% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopicClient.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopicClient.java index 4889a070..3173b337 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopicClient.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopicClient.java @@ -30,7 +30,6 @@ import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; @@ -66,17 +65,6 @@ public static TopicClient create(final Map configs, final Durati return new TopicClient(AdminClient.create(configs), timeout); } - /** - * Creates a new {@code TopicClient} using the specified configuration. 
- * - * @param configs properties passed to {@link AdminClient#create(Properties)} - * @param timeout timeout for waiting for Kafka admin calls - * @return {@code TopicClient} - */ - public static TopicClient create(final Properties configs, final Duration timeout) { - return new TopicClient(AdminClient.create(configs), timeout); - } - private static KafkaAdminException failedToDeleteTopic(final String topicName, final Throwable ex) { return new KafkaAdminException("Failed to delete topic " + topicName, ex); } diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopicSettings.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopicSettings.java similarity index 97% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopicSettings.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopicSettings.java index 61af845c..329adf1f 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopicSettings.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopicSettings.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopicSubscription.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopicSubscription.java similarity index 98% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopicSubscription.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopicSubscription.java index ec519691..8235cbe8 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopicSubscription.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopicSubscription.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopologyInformation.java b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopologyInformation.java similarity index 98% rename from streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopologyInformation.java rename to streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopologyInformation.java index af1f870b..30b75408 100644 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/util/TopologyInformation.java +++ b/streams-bootstrap-core/src/main/java/com/bakdata/kafka/util/TopologyInformation.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -37,7 +37,7 @@ import org.apache.kafka.streams.TopologyDescription.Source; import org.apache.kafka.streams.kstream.KTable; import org.apache.kafka.streams.kstream.Materialized; -import org.apache.kafka.streams.kstream.Named; +import org.apache.kafka.streams.kstream.TableJoined; import org.apache.kafka.streams.kstream.ValueJoiner; import org.jooq.lambda.Seq; @@ -52,8 +52,7 @@ public class TopologyInformation { private static final String FILTER_SUFFIX = "-filter"; /** * See - * {@link 
org.apache.kafka.streams.kstream.internals.KTableImpl#doJoinOnForeignKey(KTable, Function, ValueJoiner, - * Named, Materialized, boolean)} + * {@link org.apache.kafka.streams.kstream.internals.KTableImpl#doJoinOnForeignKey(KTable, Function, ValueJoiner, TableJoined, Materialized, boolean)} */ private static final Collection PSEUDO_TOPIC_SUFFIXES = Set.of("-pk", "-fk", "-vh"); private final String streamsId; diff --git a/streams-bootstrap-core/src/test/avro/TestRecord.avsc b/streams-bootstrap-core/src/test/avro/TestRecord.avsc new file mode 100644 index 00000000..01088fe2 --- /dev/null +++ b/streams-bootstrap-core/src/test/avro/TestRecord.avsc @@ -0,0 +1,11 @@ +{ + "type": "record", + "namespace": "com.bakdata.kafka", + "name": "TestRecord", + "fields": [ + { + "name": "content", + "type": "string" + } + ] +} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/AvroMirrorTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/AvroMirrorTest.java similarity index 56% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/AvroMirrorTest.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/AvroMirrorTest.java index 6f2269f7..ef2535be 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/AvroMirrorTest.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/AvroMirrorTest.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -32,32 +32,47 @@ import org.junit.jupiter.api.extension.RegisterExtension; class AvroMirrorTest { - private final MirrorWithNonDefaultSerde app = createApp(); + private final ConfiguredStreamsApp app = createApp(); @RegisterExtension - final TestTopologyExtension testTopology = + final TestTopologyExtension testTopology = StreamsBootstrapTopologyFactory.createTopologyExtensionWithSchemaRegistry(this.app); - private static MirrorWithNonDefaultSerde createApp() { - final MirrorWithNonDefaultSerde app = new MirrorWithNonDefaultSerde(); - app.setBrokers("localhost:9092"); - app.setInputTopics(List.of("input")); - app.setOutputTopic("output"); - return app; + private static ConfiguredStreamsApp createApp() { + final AppConfiguration configuration = new AppConfiguration<>(StreamsTopicConfig.builder() + .inputTopics(List.of("input")) + .outputTopic("output") + .build()); + return new ConfiguredStreamsApp<>(new MirrorWithNonDefaultSerde(), configuration); } @Test void shouldMirror() { - final Serde valueSerde = this.app.getValueSerde(); - final TestRecord record = TestRecord.newBuilder() + final Serde keySerde = this.getKeySerde(); + final Serde valueSerde = this.getValueSerde(); + final TestRecord testRecord = TestRecord.newBuilder() .setContent("bar") .build(); this.testTopology.input() + .withKeySerde(keySerde) .withValueSerde(valueSerde) - .add("foo", record); + .add(testRecord, testRecord); this.testTopology.streamOutput() + .withKeySerde(keySerde) .withValueSerde(valueSerde) - .expectNextRecord().hasKey("foo").hasValue(record) + .expectNextRecord().hasKey(testRecord).hasValue(testRecord) .expectNoMoreRecord(); } + + private Serde getValueSerde() { + return this.createSerdeFactory().configureForValues(MirrorWithNonDefaultSerde.newValueSerde()); + } + + private Configurator createSerdeFactory() { + return StreamsBootstrapTopologyFactory.createConfigurator(this.testTopology); + } + + private Serde 
getKeySerde() { + return this.createSerdeFactory().configureForKeys(MirrorWithNonDefaultSerde.newKeySerde()); + } } diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ConfiguratorTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ConfiguratorTest.java new file mode 100644 index 00000000..92e33e6b --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ConfiguratorTest.java @@ -0,0 +1,174 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import static org.mockito.Mockito.verify; + +import java.util.Map; +import org.apache.kafka.common.serialization.Serde; +import org.apache.kafka.common.serialization.Serializer; +import org.assertj.core.api.SoftAssertions; +import org.assertj.core.api.junit.jupiter.InjectSoftAssertions; +import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +@ExtendWith(SoftAssertionsExtension.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +class ConfiguratorTest { + + @InjectSoftAssertions + private SoftAssertions softly; + @Mock + private Serde serde; + @Mock + private Serializer serializer; + + @Test + void shouldConfigureValueSerde() { + final Configurator configurator = new Configurator(Map.of( + "prop1", "value1", + "prop2", "value2" + )); + this.softly.assertThat(configurator.configureForValues(this.serde)).isEqualTo(this.serde); + verify(this.serde).configure(Map.of( + "prop1", "value1", + "prop2", "value2" + ), false); + } + + @Test + void shouldConfigureValueSerdeWithConfig() { + final Configurator configurator = new Configurator(Map.of( + "prop1", "value1", + "prop2", "value2" + )); + this.softly.assertThat(configurator.configureForValues(this.serde, Map.of( + "prop2", "overridden", + "prop3", "value3" + ))).isEqualTo(this.serde); + verify(this.serde).configure(Map.of( + "prop1", "value1", + "prop2", "overridden", + "prop3", "value3" + ), false); + } + + @Test + void shouldConfigureKeySerde() { + final Configurator configurator = new Configurator(Map.of( + "prop1", "value1", + "prop2", "value2" + )); + 
this.softly.assertThat(configurator.configureForKeys(this.serde)).isEqualTo(this.serde); + verify(this.serde).configure(Map.of( + "prop1", "value1", + "prop2", "value2" + ), true); + } + + @Test + void shouldConfigureKeySerdeWithConfig() { + final Configurator configurator = new Configurator(Map.of( + "prop1", "value1", + "prop2", "value2" + )); + this.softly.assertThat(configurator.configureForKeys(this.serde, Map.of( + "prop2", "overridden", + "prop3", "value3" + ))).isEqualTo(this.serde); + verify(this.serde).configure(Map.of( + "prop1", "value1", + "prop2", "overridden", + "prop3", "value3" + ), true); + } + + @Test + void shouldConfigureValueSerializer() { + final Configurator configurator = new Configurator(Map.of( + "prop1", "value1", + "prop2", "value2" + )); + this.softly.assertThat(configurator.configureForValues(this.serializer)).isEqualTo(this.serializer); + verify(this.serializer).configure(Map.of( + "prop1", "value1", + "prop2", "value2" + ), false); + } + + @Test + void shouldConfigureValueSerializerWithConfig() { + final Configurator configurator = new Configurator(Map.of( + "prop1", "value1", + "prop2", "value2" + )); + this.softly.assertThat(configurator.configureForValues(this.serializer, Map.of( + "prop2", "overridden", + "prop3", "value3" + ))).isEqualTo(this.serializer); + verify(this.serializer).configure(Map.of( + "prop1", "value1", + "prop2", "overridden", + "prop3", "value3" + ), false); + } + + @Test + void shouldConfigureKeySerializer() { + final Configurator configurator = new Configurator(Map.of( + "prop1", "value1", + "prop2", "value2" + )); + this.softly.assertThat(configurator.configureForKeys(this.serializer)).isEqualTo(this.serializer); + verify(this.serializer).configure(Map.of( + "prop1", "value1", + "prop2", "value2" + ), true); + } + + @Test + void shouldConfigureKeySerializerWithConfig() { + final Configurator configurator = new Configurator(Map.of( + "prop1", "value1", + "prop2", "value2" + )); + this.softly.assertThat(configurator.configureForKeys(this.serializer, Map.of( + "prop2", "overridden", + "prop3", "value3" + ))).isEqualTo(this.serializer); + verify(this.serializer).configure(Map.of( + "prop1", "value1", + "prop2", "overridden", + "prop3", "value3" + ), true); + } + +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ConfiguredProducerAppTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ConfiguredProducerAppTest.java new file mode 100644 index 00000000..68314c09 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ConfiguredProducerAppTest.java @@ -0,0 +1,118 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
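The `ConfiguratorTest` cases above pin down the contract of `Configurator`: a base Kafka config captured at construction time is handed to `Serde#configure`/`Serializer#configure` (with the boolean flag distinguishing key and value usage), and a per-call map overrides individual entries of that base config. A minimal usage sketch, assuming the methods are generic over the serde type (the diff rendering strips type parameters) and with illustrative property names:

```java
import com.bakdata.kafka.Configurator;
import java.util.Map;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;

// Base config that every serde configured through this Configurator receives
final Configurator configurator = new Configurator(Map.of(
        "schema.registry.url", "http://localhost:8081" // illustrative entry
));
// Configured for values: Serde#configure(baseConfig, isKey = false)
final Serde<String> valueSerde = configurator.configureForValues(Serdes.String());
// Configured for keys; the per-call map overrides entries of the base config
final Serde<String> keySerde = configurator.configureForKeys(Serdes.String(),
        Map.of("schema.registry.url", "http://other-registry:8081"));
```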
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; +import static org.assertj.core.api.Assertions.assertThat; + +import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer; +import java.util.Map; +import org.apache.kafka.common.serialization.StringSerializer; +import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.SetEnvironmentVariable; + +class ConfiguredProducerAppTest { + + private static AppConfiguration newAppConfiguration() { + return new AppConfiguration<>(emptyTopicConfig()); + } + + private static ProducerTopicConfig emptyTopicConfig() { + return ProducerTopicConfig.builder().build(); + } + + @Test + void shouldPrioritizeConfigCLIParameters() { + final AppConfiguration configuration = new AppConfiguration<>(emptyTopicConfig(), Map.of( + "foo", "baz", + "kafka", "streams" + )); + final ConfiguredProducerApp configuredApp = + new ConfiguredProducerApp<>(new TestProducer(), configuration); + assertThat(configuredApp.getKafkaProperties(KafkaEndpointConfig.builder() + .brokers("fake") + .build())) + .containsEntry("foo", "baz") + .containsEntry("kafka", "streams") + .containsEntry("hello", "world"); + } + + @Test + @SetEnvironmentVariable(key = "STREAMS_FOO", value = "baz") + @SetEnvironmentVariable(key = "STREAMS_STREAMS", value = "streams") + void shouldPrioritizeEnvironmentConfigs() { + final AppConfiguration configuration = newAppConfiguration(); + final ConfiguredProducerApp configuredApp = + new ConfiguredProducerApp<>(new TestProducer(), configuration); + assertThat(configuredApp.getKafkaProperties(KafkaEndpointConfig.builder() + .brokers("fake") + .build())) + .containsEntry("foo", "baz") + .containsEntry("streams", "streams") + .containsEntry("hello", "world"); + } + + @Test + void shouldSetDefaultAvroSerializerWhenSchemaRegistryUrlIsSet() { + final AppConfiguration configuration = newAppConfiguration(); + final ConfiguredProducerApp configuredApp = + new ConfiguredProducerApp<>(new TestProducer(), configuration); + assertThat(configuredApp.getKafkaProperties(KafkaEndpointConfig.builder() + .brokers("fake") + .schemaRegistryUrl("fake") + .build())) + .containsEntry(KEY_SERIALIZER_CLASS_CONFIG, SpecificAvroSerializer.class) + .containsEntry(VALUE_SERIALIZER_CLASS_CONFIG, SpecificAvroSerializer.class); + } + + @Test + void shouldSetDefaultStringSerializerWhenSchemaRegistryUrlIsNotSet() { + final AppConfiguration configuration = newAppConfiguration(); + final ConfiguredProducerApp configuredApp = + new ConfiguredProducerApp<>(new TestProducer(), configuration); + assertThat(configuredApp.getKafkaProperties(KafkaEndpointConfig.builder() + .brokers("fake") + .build())) + .containsEntry(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) + .containsEntry(VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); + } + + private static class TestProducer implements ProducerApp { + + @Override + public ProducerRunnable buildRunnable(final ProducerBuilder builder) { + throw new UnsupportedOperationException(); + } + + @Override + public Map createKafkaProperties() { + return Map.of( + "foo", "bar", + "hello", "world" + ); + 
} + } +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ConfiguredStreamsAppTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ConfiguredStreamsAppTest.java new file mode 100644 index 00000000..c9037bc4 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ConfiguredStreamsAppTest.java @@ -0,0 +1,125 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; +import static org.apache.kafka.streams.StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG; +import static org.apache.kafka.streams.StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG; +import static org.assertj.core.api.Assertions.assertThat; + +import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; +import java.util.Map; +import org.apache.kafka.common.serialization.Serdes.StringSerde; +import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.SetEnvironmentVariable; + +class ConfiguredStreamsAppTest { + + private static StreamsTopicConfig emptyTopicConfig() { + return StreamsTopicConfig.builder().build(); + } + + private static AppConfiguration newAppConfiguration() { + return new AppConfiguration<>(emptyTopicConfig()); + } + + @Test + void shouldPrioritizeConfigCLIParameters() { + final AppConfiguration configuration = new AppConfiguration<>(emptyTopicConfig(), Map.of( + "foo", "baz", + "kafka", "streams" + )); + final ConfiguredStreamsApp configuredApp = + new ConfiguredStreamsApp<>(new TestApplication(), configuration); + assertThat(configuredApp.getKafkaProperties(KafkaEndpointConfig.builder() + .brokers("fake") + .build())) + .containsEntry("foo", "baz") + .containsEntry("kafka", "streams") + .containsEntry("hello", "world"); + } + + @Test + @SetEnvironmentVariable(key = "STREAMS_FOO", value = "baz") + @SetEnvironmentVariable(key = "STREAMS_STREAMS", value = "streams") + void shouldPrioritizeEnvironmentConfigs() { + final AppConfiguration configuration = newAppConfiguration(); + final ConfiguredStreamsApp configuredApp = + new ConfiguredStreamsApp<>(new TestApplication(), configuration); + assertThat(configuredApp.getKafkaProperties(KafkaEndpointConfig.builder() + .brokers("fake") + .build())) + .containsEntry("foo", "baz") + .containsEntry("streams", "streams") + .containsEntry("hello", "world"); + } + + @Test + void 
shouldSetDefaultAvroSerdeWhenSchemaRegistryUrlIsSet() { + final AppConfiguration configuration = newAppConfiguration(); + final ConfiguredStreamsApp configuredApp = + new ConfiguredStreamsApp<>(new TestApplication(), configuration); + assertThat(configuredApp.getKafkaProperties(KafkaEndpointConfig.builder() + .brokers("fake") + .schemaRegistryUrl("fake") + .build())) + .containsEntry(DEFAULT_KEY_SERDE_CLASS_CONFIG, SpecificAvroSerde.class) + .containsEntry(DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class) + .containsEntry(SCHEMA_REGISTRY_URL_CONFIG, "fake"); + } + + @Test + void shouldSetDefaultStringSerdeWhenSchemaRegistryUrlIsNotSet() { + final AppConfiguration configuration = newAppConfiguration(); + final ConfiguredStreamsApp configuredApp = + new ConfiguredStreamsApp<>(new TestApplication(), configuration); + assertThat(configuredApp.getKafkaProperties(KafkaEndpointConfig.builder() + .brokers("fake") + .build())) + .containsEntry(DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class) + .containsEntry(DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); + } + + private static class TestApplication implements StreamsApp { + + @Override + public void buildTopology(final TopologyBuilder builder) { + throw new UnsupportedOperationException(); + } + + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + return "foo"; + } + + @Override + public Map createKafkaProperties() { + return Map.of( + "foo", "bar", + "hello", "world" + ); + } + } +} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/EnvironmentStreamsConfigParserTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/EnvironmentStreamsConfigParserTest.java similarity index 98% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/EnvironmentStreamsConfigParserTest.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/EnvironmentStreamsConfigParserTest.java index ac89d8d1..78d2480e 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/EnvironmentStreamsConfigParserTest.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/EnvironmentStreamsConfigParserTest.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ExecutableProducerAppTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ExecutableProducerAppTest.java new file mode 100644 index 00000000..03fbe575 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ExecutableProducerAppTest.java @@ -0,0 +1,118 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
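The `ConfiguredStreamsAppTest` cases above describe how the effective Kafka properties are assembled: values supplied via `AppConfiguration` or `STREAMS_`-prefixed environment variables take precedence over `createKafkaProperties()`, and the default serdes switch between `SpecificAvroSerde` and `StringSerde` depending on whether the endpoint carries a schema registry URL. A hedged sketch of that wiring (type parameters and the `Map<String, Object>` return type are assumptions, since the diff rendering strips generics; topic names are examples):

```java
import com.bakdata.kafka.AppConfiguration;
import com.bakdata.kafka.ConfiguredStreamsApp;
import com.bakdata.kafka.KafkaEndpointConfig;
import com.bakdata.kafka.StreamsApp;
import com.bakdata.kafka.StreamsTopicConfig;
import com.bakdata.kafka.TopologyBuilder;
import java.util.List;
import java.util.Map;

final StreamsApp app = new StreamsApp() {
    @Override
    public void buildTopology(final TopologyBuilder builder) {
        builder.streamInput().to(builder.getTopics().getOutputTopic());
    }

    @Override
    public String getUniqueAppId(final StreamsTopicConfig topics) {
        return "my-app";
    }
};
final AppConfiguration<StreamsTopicConfig> configuration = new AppConfiguration<>(StreamsTopicConfig.builder()
        .inputTopics(List.of("input"))
        .outputTopic("output")
        .build());
final ConfiguredStreamsApp<StreamsApp> configuredApp = new ConfiguredStreamsApp<>(app, configuration);
// Avro serdes become the default because a schema registry URL is part of the endpoint
final Map<String, Object> kafkaProperties = configuredApp.getKafkaProperties(KafkaEndpointConfig.builder()
        .brokers("localhost:9092")
        .schemaRegistryUrl("http://localhost:8081")
        .build());
```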
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Supplier; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +class ExecutableProducerAppTest { + + @Mock + private Consumer> setup; + @Mock + private Supplier setupCleanUp; + + @Test + void shouldCallSetupWhenCreatingRunner() { + final ProducerTopicConfig topics = ProducerTopicConfig.builder() + .outputTopic("output") + .build(); + final AppConfiguration configuration = new AppConfiguration<>(topics); + final ConfiguredProducerApp configuredApp = + new ConfiguredProducerApp<>(new TestProducer(), configuration); + final KafkaEndpointConfig endpointConfig = KafkaEndpointConfig.builder() + .brokers("localhost:9092") + .build(); + final ExecutableProducerApp executableApp = configuredApp.withEndpoint(endpointConfig); + final Map kafkaProperties = configuredApp.getKafkaProperties(endpointConfig); + executableApp.createRunner(); + verify(this.setup).accept(new EffectiveAppConfiguration<>(topics, kafkaProperties)); + } + + @Test + void shouldCallSetupWhenCreatingRunnerWithOptions() { + final ProducerTopicConfig topics = ProducerTopicConfig.builder() + .outputTopic("output") + .build(); + final AppConfiguration configuration = new AppConfiguration<>(topics); + final ConfiguredProducerApp configuredApp = + new ConfiguredProducerApp<>(new TestProducer(), configuration); + final KafkaEndpointConfig endpointConfig = KafkaEndpointConfig.builder() + .brokers("localhost:9092") + .build(); + final ExecutableProducerApp executableApp = configuredApp.withEndpoint(endpointConfig); + final Map kafkaProperties = configuredApp.getKafkaProperties(endpointConfig); + executableApp.createRunner(ProducerExecutionOptions.builder().build()); + verify(this.setup).accept(new EffectiveAppConfiguration<>(topics, kafkaProperties)); + } + + @Test + void shouldCallSetupCleanUpWhenCreatingCleanUpRunner() { + final ProducerTopicConfig topics = ProducerTopicConfig.builder() + .outputTopic("output") + .build(); + final AppConfiguration configuration = new AppConfiguration<>(topics); + final ConfiguredProducerApp configuredApp = + new ConfiguredProducerApp<>(new TestProducer(), configuration); + final KafkaEndpointConfig endpointConfig = KafkaEndpointConfig.builder() + .brokers("localhost:9092") + .build(); + final ExecutableProducerApp executableApp = configuredApp.withEndpoint(endpointConfig); + when(this.setupCleanUp.get()).thenReturn(new ProducerCleanUpConfiguration()); + executableApp.createCleanUpRunner(); + verify(this.setupCleanUp).get(); + } + + private class TestProducer implements ProducerApp { + + @Override + public void setup(final 
EffectiveAppConfiguration configuration) { + ExecutableProducerAppTest.this.setup.accept(configuration); + } + + @Override + public ProducerCleanUpConfiguration setupCleanUp( + final EffectiveAppConfiguration configuration) { + return ExecutableProducerAppTest.this.setupCleanUp.get(); + } + + @Override + public ProducerRunnable buildRunnable(final ProducerBuilder builder) { + return () -> {}; + } + } +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ExecutableStreamsAppTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ExecutableStreamsAppTest.java new file mode 100644 index 00000000..c1ff3fbf --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ExecutableStreamsAppTest.java @@ -0,0 +1,128 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
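The producer tests above exercise the same configure-then-execute chain: a `ProducerApp` is wrapped in a `ConfiguredProducerApp`, bound to a cluster via `withEndpoint`, and then either run or cleaned. A sketch of that flow, assuming the generic type parameters (stripped in this diff rendering) and an illustrative record:

```java
import com.bakdata.kafka.AppConfiguration;
import com.bakdata.kafka.ConfiguredProducerApp;
import com.bakdata.kafka.ExecutableProducerApp;
import com.bakdata.kafka.KafkaEndpointConfig;
import com.bakdata.kafka.ProducerApp;
import com.bakdata.kafka.ProducerBuilder;
import com.bakdata.kafka.ProducerRunnable;
import com.bakdata.kafka.ProducerTopicConfig;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

final ProducerApp producerApp = new ProducerApp() {
    @Override
    public ProducerRunnable buildRunnable(final ProducerBuilder builder) {
        return () -> {
            try (final Producer<String, String> producer = builder.createProducer()) {
                producer.send(new ProducerRecord<>("output", "foo", "bar")); // illustrative record
            }
        };
    }
};
final ConfiguredProducerApp<ProducerApp> configuredApp = new ConfiguredProducerApp<>(producerApp,
        new AppConfiguration<>(ProducerTopicConfig.builder()
                .outputTopic("output")
                .build()));
final ExecutableProducerApp<ProducerApp> executableApp = configuredApp.withEndpoint(KafkaEndpointConfig.builder()
        .brokers("localhost:9092")
        .build());
executableApp.createRunner().run();          // produce to the output topic
executableApp.createCleanUpRunner().clean(); // delete output topics and associated schemas
```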
+ */ + +package com.bakdata.kafka; + +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Supplier; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +class ExecutableStreamsAppTest { + + @Mock + private Consumer> setup; + @Mock + private Supplier setupCleanUp; + + @Test + void shouldCallSetupWhenCreatingRunner() { + final StreamsTopicConfig topics = StreamsTopicConfig.builder() + .inputTopics(List.of("input")) + .outputTopic("output") + .build(); + final AppConfiguration configuration = new AppConfiguration<>(topics); + final ConfiguredStreamsApp configuredApp = + new ConfiguredStreamsApp<>(new TestApplication(), configuration); + final KafkaEndpointConfig endpointConfig = KafkaEndpointConfig.builder() + .brokers("localhost:9092") + .build(); + final ExecutableStreamsApp executableApp = configuredApp.withEndpoint(endpointConfig); + final Map kafkaProperties = configuredApp.getKafkaProperties(endpointConfig); + executableApp.createRunner(); + verify(this.setup).accept(new EffectiveAppConfiguration<>(topics, kafkaProperties)); + } + + @Test + void shouldCallSetupWhenCreatingRunnerWithOptions() { + final StreamsTopicConfig topics = StreamsTopicConfig.builder() + .inputTopics(List.of("input")) + .outputTopic("output") + .build(); + final AppConfiguration configuration = new AppConfiguration<>(topics); + final ConfiguredStreamsApp configuredApp = + new ConfiguredStreamsApp<>(new TestApplication(), configuration); + final KafkaEndpointConfig endpointConfig = KafkaEndpointConfig.builder() + .brokers("localhost:9092") + .build(); + final ExecutableStreamsApp executableApp = configuredApp.withEndpoint(endpointConfig); + final Map kafkaProperties = configuredApp.getKafkaProperties(endpointConfig); + executableApp.createRunner(StreamsExecutionOptions.builder().build()); + verify(this.setup).accept(new EffectiveAppConfiguration<>(topics, kafkaProperties)); + } + + @Test + void shouldCallSetupCleanUpWhenCreatingCleanUpRunner() { + final StreamsTopicConfig topics = StreamsTopicConfig.builder() + .inputTopics(List.of("input")) + .outputTopic("output") + .build(); + final AppConfiguration configuration = new AppConfiguration<>(topics); + final ConfiguredStreamsApp configuredApp = + new ConfiguredStreamsApp<>(new TestApplication(), configuration); + final KafkaEndpointConfig endpointConfig = KafkaEndpointConfig.builder() + .brokers("localhost:9092") + .build(); + final ExecutableStreamsApp executableApp = configuredApp.withEndpoint(endpointConfig); + when(this.setupCleanUp.get()).thenReturn(new StreamsCleanUpConfiguration()); + executableApp.createCleanUpRunner(); + verify(this.setupCleanUp).get(); + } + + private class TestApplication implements StreamsApp { + + @Override + public void setup(final EffectiveAppConfiguration configuration) { + ExecutableStreamsAppTest.this.setup.accept(configuration); + } + + @Override + public StreamsCleanUpConfiguration setupCleanUp( + final EffectiveAppConfiguration setupConfiguration) { + return ExecutableStreamsAppTest.this.setupCleanUp.get(); + } + + @Override + public void buildTopology(final TopologyBuilder builder) { + 
builder.streamInput() + .to(builder.getTopics().getOutputTopic()); + } + + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + return "foo"; + } + } +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ImprovedStreamsConfigTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ImprovedStreamsConfigTest.java new file mode 100644 index 00000000..4c1f4738 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/ImprovedStreamsConfigTest.java @@ -0,0 +1,98 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import java.util.List; +import java.util.Map; +import org.apache.kafka.streams.StreamsConfig; +import org.assertj.core.api.SoftAssertions; +import org.assertj.core.api.junit.jupiter.InjectSoftAssertions; +import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +@ExtendWith(SoftAssertionsExtension.class) +class ImprovedStreamsConfigTest { + + @InjectSoftAssertions + private SoftAssertions softly; + + @Test + void shouldGetAppId() { + final StreamsConfig config = new StreamsConfig( + Map.of( + StreamsConfig.APPLICATION_ID_CONFIG, "test-app", + StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "broker1:9092" + ) + ); + this.softly.assertThat(new ImprovedStreamsConfig(config).getAppId()) + .isEqualTo("test-app"); + } + + @Test + void shouldGetBootstrapServersFromList() { + final StreamsConfig config = new StreamsConfig( + Map.of( + StreamsConfig.APPLICATION_ID_CONFIG, "test-app", + StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, List.of("broker1:9092", "broker2:9092", "broker3:9092") + ) + ); + this.softly.assertThat(new ImprovedStreamsConfig(config).getBoostrapServers()) + .isEqualTo(List.of("broker1:9092", "broker2:9092", "broker3:9092")); + } + + @Test + void shouldGetBootstrapServersFromString() { + final StreamsConfig config = new StreamsConfig( + Map.of( + StreamsConfig.APPLICATION_ID_CONFIG, "test-app", + StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "broker1:9092,broker2:9092,broker3:9092" + ) + ); + this.softly.assertThat(new ImprovedStreamsConfig(config).getBoostrapServers()) + .isEqualTo(List.of("broker1:9092", "broker2:9092", "broker3:9092")); + } + + @Test + void shouldGetOriginalKafkaProperties() { + final StreamsConfig config = new StreamsConfig( + Map.of( + StreamsConfig.APPLICATION_ID_CONFIG, "test-app", + 
StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "broker1:9092" + ) + ); + this.softly.assertThat(new ImprovedStreamsConfig(config).getKafkaProperties()) + .hasSize(2) + .anySatisfy((key, value) -> { + this.softly.assertThat(key).isEqualTo(StreamsConfig.APPLICATION_ID_CONFIG); + this.softly.assertThat(value).isEqualTo("test-app"); + }) + .anySatisfy((key, value) -> { + this.softly.assertThat(key).isEqualTo(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG); + this.softly.assertThat(value).isEqualTo("broker1:9092"); + }); + } + +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/PreconfiguredTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/PreconfiguredTest.java new file mode 100644 index 00000000..740343d5 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/PreconfiguredTest.java @@ -0,0 +1,44 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
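The accessors covered by `ImprovedStreamsConfigTest` amount to a thin wrapper around `StreamsConfig`. A short usage sketch (return types are assumed where the diff rendering strips generics; the accessor is spelled `getBoostrapServers` in the source and that spelling is kept here):

```java
import com.bakdata.kafka.ImprovedStreamsConfig;
import java.util.List;
import java.util.Map;
import org.apache.kafka.streams.StreamsConfig;

final StreamsConfig config = new StreamsConfig(Map.of(
        StreamsConfig.APPLICATION_ID_CONFIG, "my-app",
        StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "broker1:9092,broker2:9092"
));
final ImprovedStreamsConfig improvedConfig = new ImprovedStreamsConfig(config);
final String appId = improvedConfig.getAppId();                    // "my-app"
final List<String> brokers = improvedConfig.getBoostrapServers();  // [broker1:9092, broker2:9092]
final Map<String, Object> originals = improvedConfig.getKafkaProperties();
```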
+ */ + +package com.bakdata.kafka; + +import static java.util.Collections.emptyMap; +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Test; + +class PreconfiguredTest { + + @Test + void shouldCreateDefaultSerde() { + assertThat(Preconfigured.defaultSerde().configureForValues(emptyMap())).isNull(); + } + + @Test + void shouldCreateDefaultSerializer() { + assertThat(Preconfigured.defaultSerializer().configureForValues(emptyMap())).isNull(); + } + +} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/CleanUpRunnerTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/StreamsCleanUpRunnerTest.java similarity index 77% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/CleanUpRunnerTest.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/StreamsCleanUpRunnerTest.java index 667028a7..a5af03d9 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/CleanUpRunnerTest.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/StreamsCleanUpRunnerTest.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -26,22 +26,23 @@ import static org.assertj.core.api.Assertions.assertThat; -import com.bakdata.kafka.test_applications.WordCount; import java.io.File; import java.io.FileInputStream; import java.io.IOException; -import java.util.List; +import java.util.Map; import java.util.Properties; +import org.apache.kafka.common.serialization.StringSerializer; import org.junit.jupiter.api.Test; -class CleanUpRunnerTest { +class StreamsCleanUpRunnerTest { @Test void createTemporaryPropertiesFile() throws IOException { - final WordCount wordCount = new WordCount(); - wordCount.setInputTopics(List.of("input")); - final File file = CleanUpRunner.createTemporaryPropertiesFile(wordCount.getUniqueAppId(), - wordCount.getKafkaProperties()); + final Map config = Map.of( + "foo", "bar", + "baz", StringSerializer.class + ); + final File file = StreamsCleanUpRunner.createTemporaryPropertiesFile("appId", config); assertThat(file).exists(); @@ -50,7 +51,7 @@ void createTemporaryPropertiesFile() throws IOException { properties.load(inStream); } - final Properties expected = CleanUpRunner.toStringBasedProperties(wordCount.getKafkaProperties()); + final Properties expected = StreamsCleanUpRunner.toStringBasedProperties(config); assertThat(properties).containsAllEntriesOf(expected); } } diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/StreamsExecutionOptionsTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/StreamsExecutionOptionsTest.java new file mode 100644 index 00000000..95d94631 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/StreamsExecutionOptionsTest.java @@ -0,0 +1,62 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall 
be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka; + +import static java.util.Collections.emptyMap; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Map; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.junit.jupiter.api.Test; + +class StreamsExecutionOptionsTest { + + @Test + void shouldLeaveGroup() { + final StreamsExecutionOptions options = StreamsExecutionOptions.builder() + .build(); + assertThat(options.shouldLeaveGroup(emptyMap())).isTrue(); + } + + @Test + void shouldNotLeaveGroup() { + final StreamsExecutionOptions options = StreamsExecutionOptions.builder() + .volatileGroupInstanceId(false) + .build(); + assertThat(options.shouldLeaveGroup(Map.of( + ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "foo" + ))).isFalse(); + } + + @Test + void shouldLeaveGroupWithVolatileGroupId() { + final StreamsExecutionOptions options = StreamsExecutionOptions.builder() + .volatileGroupInstanceId(true) + .build(); + assertThat(options.shouldLeaveGroup(Map.of( + ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "foo" + ))).isTrue(); + } +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/TestUtil.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/TestUtil.java new file mode 100644 index 00000000..a40268a6 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/TestUtil.java @@ -0,0 +1,49 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
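`StreamsExecutionOptionsTest` above documents when the application leaves its consumer group on shutdown: always if no `group.instance.id` is configured, and for static members only if the group instance id is declared volatile. A short sketch mirroring those cases (map types are assumed, since the diff rendering strips generics):

```java
import static org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_INSTANCE_ID_CONFIG;

import com.bakdata.kafka.StreamsExecutionOptions;
import java.util.Map;

// No static membership configured: leave the group on shutdown (returns true)
final boolean leavesByDefault = StreamsExecutionOptions.builder()
        .build()
        .shouldLeaveGroup(Map.of());

// Stable group.instance.id: stay in the group so a restart can rejoin quickly (returns false)
final boolean leavesAsStaticMember = StreamsExecutionOptions.builder()
        .volatileGroupInstanceId(false)
        .build()
        .shouldLeaveGroup(Map.of(GROUP_INSTANCE_ID_CONFIG, "instance-1"));
```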
+ */ + +package com.bakdata.kafka; + +import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith; +import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.newClusterConfig; +import static net.mguenther.kafka.junit.EmbeddedKafkaConfig.brokers; + +import lombok.experimental.UtilityClass; +import net.mguenther.kafka.junit.EmbeddedKafkaCluster; +import net.mguenther.kafka.junit.EmbeddedKafkaConfig.EmbeddedKafkaConfigBuilder; + +@UtilityClass +public class TestUtil { + public static EmbeddedKafkaCluster newKafkaCluster() { + return provisionWith(newClusterConfig() + .configure(newKafkaConfig() + .build()) + .build()); + } + + public static EmbeddedKafkaConfigBuilder newKafkaConfig() { + return brokers() + .with("transaction.state.log.num.partitions", 10) + .with("offsets.topic.num.partitions", 10); + } +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/KafkaTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/KafkaTest.java new file mode 100644 index 00000000..1e75ef40 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/KafkaTest.java @@ -0,0 +1,62 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka.integration; + +import com.bakdata.kafka.KafkaEndpointConfig; +import com.bakdata.kafka.TestUtil; +import com.bakdata.schemaregistrymock.junit5.SchemaRegistryMockExtension; +import net.mguenther.kafka.junit.EmbeddedKafkaCluster; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.RegisterExtension; + +abstract class KafkaTest { + @RegisterExtension + final SchemaRegistryMockExtension schemaRegistryMockExtension = new SchemaRegistryMockExtension(); + final EmbeddedKafkaCluster kafkaCluster = TestUtil.newKafkaCluster(); + + @BeforeEach + void setup() { + this.kafkaCluster.start(); + } + + @AfterEach + void tearDown() { + this.kafkaCluster.stop(); + } + + KafkaEndpointConfig createEndpointWithoutSchemaRegistry() { + return KafkaEndpointConfig.builder() + .brokers(this.kafkaCluster.getBrokerList()) + .build(); + } + + KafkaEndpointConfig createEndpoint() { + return KafkaEndpointConfig.builder() + .brokers(this.kafkaCluster.getBrokerList()) + .schemaRegistryUrl(this.schemaRegistryMockExtension.getUrl()) + .build(); + } +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/ProducerCleanUpRunnerTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/ProducerCleanUpRunnerTest.java new file mode 100644 index 00000000..05f3381c --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/ProducerCleanUpRunnerTest.java @@ -0,0 +1,175 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka.integration; + + +import static com.bakdata.kafka.integration.ProducerRunnerTest.configureApp; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +import com.bakdata.kafka.CleanUpRunner; +import com.bakdata.kafka.ConfiguredProducerApp; +import com.bakdata.kafka.EffectiveAppConfiguration; +import com.bakdata.kafka.ExecutableApp; +import com.bakdata.kafka.ExecutableProducerApp; +import com.bakdata.kafka.HasTopicHooks.TopicHook; +import com.bakdata.kafka.ProducerApp; +import com.bakdata.kafka.ProducerCleanUpConfiguration; +import com.bakdata.kafka.ProducerTopicConfig; +import com.bakdata.kafka.Runner; +import com.bakdata.kafka.test_applications.AvroKeyProducer; +import com.bakdata.kafka.test_applications.AvroValueProducer; +import com.bakdata.kafka.test_applications.StringProducer; +import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; +import java.io.IOException; +import java.util.List; +import net.mguenther.kafka.junit.KeyValue; +import net.mguenther.kafka.junit.ReadKeyValues; +import org.assertj.core.api.SoftAssertions; +import org.assertj.core.api.junit.jupiter.InjectSoftAssertions; +import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +@ExtendWith(SoftAssertionsExtension.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +class ProducerCleanUpRunnerTest extends KafkaTest { + @InjectSoftAssertions + private SoftAssertions softly; + @Mock + private TopicHook topicHook; + + static ConfiguredProducerApp createStringApplication() { + return configureApp(new StringProducer(), ProducerTopicConfig.builder() + .outputTopic("output") + .build()); + } + + private static ConfiguredProducerApp createAvroKeyApplication() { + return configureApp(new AvroKeyProducer(), ProducerTopicConfig.builder() + .outputTopic("output") + .build()); + } + + private static ConfiguredProducerApp createAvroValueApplication() { + return configureApp(new AvroValueProducer(), ProducerTopicConfig.builder() + .outputTopic("output") + .build()); + } + + private static void clean(final ExecutableApp app) { + app.createCleanUpRunner().clean(); + } + + private static void run(final ExecutableApp executableApp) { + executableApp.createRunner().run(); + } + + @Test + void shouldDeleteTopic() throws InterruptedException { + try (final ConfiguredProducerApp app = createStringApplication(); + final ExecutableProducerApp executableApp = app.withEndpoint( + this.createEndpointWithoutSchemaRegistry())) { + run(executableApp); + + final List> output = this.readOutputTopic(app.getTopics().getOutputTopic()); + this.softly.assertThat(output) + .containsExactlyInAnyOrderElementsOf(List.of(new KeyValue<>("foo", "bar"))); + + clean(executableApp); + + this.softly.assertThat(this.kafkaCluster.exists(app.getTopics().getOutputTopic())) + .as("Output topic is deleted") + .isFalse(); + } + } + + @Test + void shouldDeleteValueSchema() throws IOException, RestClientException { + try (final ConfiguredProducerApp app = createAvroValueApplication(); + final ExecutableProducerApp executableApp = app.withEndpoint(this.createEndpoint()); + final 
SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient()) { + run(executableApp); + + final String outputTopic = app.getTopics().getOutputTopic(); + this.softly.assertThat(client.getAllSubjects()) + .contains(outputTopic + "-value"); + clean(executableApp); + this.softly.assertThat(client.getAllSubjects()) + .doesNotContain(outputTopic + "-value"); + } + } + + @Test + void shouldDeleteKeySchema() throws IOException, RestClientException { + try (final ConfiguredProducerApp app = createAvroKeyApplication(); + final ExecutableProducerApp executableApp = app.withEndpoint(this.createEndpoint()); + final SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient()) { + run(executableApp); + + final String outputTopic = app.getTopics().getOutputTopic(); + this.softly.assertThat(client.getAllSubjects()) + .contains(outputTopic + "-key"); + clean(executableApp); + this.softly.assertThat(client.getAllSubjects()) + .doesNotContain(outputTopic + "-key"); + } + } + + @Test + void shouldCallCleanUpHookForAllTopics() { + try (final ConfiguredProducerApp app = this.createCleanUpHookApplication(); + final ExecutableProducerApp executableApp = app.withEndpoint(this.createEndpoint())) { + clean(executableApp); + verify(this.topicHook).deleted(app.getTopics().getOutputTopic()); + verifyNoMoreInteractions(this.topicHook); + } + } + + private ConfiguredProducerApp createCleanUpHookApplication() { + return configureApp(new StringProducer() { + @Override + public ProducerCleanUpConfiguration setupCleanUp( + final EffectiveAppConfiguration configuration) { + return super.setupCleanUp(configuration) + .registerTopicHook(ProducerCleanUpRunnerTest.this.topicHook); + } + }, ProducerTopicConfig.builder() + .outputTopic("output") + .build()); + } + + private List> readOutputTopic(final String outputTopic) throws InterruptedException { + final ReadKeyValues readRequest = ReadKeyValues.from(outputTopic).build(); + return this.kafkaCluster.read(readRequest); + } + +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/ProducerRunnerTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/ProducerRunnerTest.java new file mode 100644 index 00000000..1c0b23bd --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/ProducerRunnerTest.java @@ -0,0 +1,76 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka.integration; + +import static com.bakdata.kafka.integration.ProducerCleanUpRunnerTest.createStringApplication; + +import com.bakdata.kafka.AppConfiguration; +import com.bakdata.kafka.ConfiguredProducerApp; +import com.bakdata.kafka.ProducerApp; +import com.bakdata.kafka.ProducerRunner; +import com.bakdata.kafka.ProducerTopicConfig; +import java.util.List; +import net.mguenther.kafka.junit.KeyValue; +import net.mguenther.kafka.junit.ReadKeyValues; +import org.assertj.core.api.SoftAssertions; +import org.assertj.core.api.junit.jupiter.InjectSoftAssertions; +import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +@ExtendWith(SoftAssertionsExtension.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +class ProducerRunnerTest extends KafkaTest { + @InjectSoftAssertions + private SoftAssertions softly; + + static ConfiguredProducerApp configureApp(final ProducerApp app, final ProducerTopicConfig topics) { + final AppConfiguration configuration = new AppConfiguration<>(topics); + return new ConfiguredProducerApp<>(app, configuration); + } + + @Test + void shouldRunApp() throws InterruptedException { + try (final ConfiguredProducerApp app = createStringApplication(); + final ProducerRunner runner = app.withEndpoint(this.createEndpointWithoutSchemaRegistry()) + .createRunner()) { + runner.run(); + + final List> output = this.readOutputTopic(app.getTopics().getOutputTopic()); + this.softly.assertThat(output) + .containsExactlyInAnyOrderElementsOf(List.of(new KeyValue<>("foo", "bar"))); + } + } + + private List> readOutputTopic(final String outputTopic) throws InterruptedException { + final ReadKeyValues readRequest = ReadKeyValues.from(outputTopic).build(); + return this.kafkaCluster.read(readRequest); + } + +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/StreamsCleanUpRunnerTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/StreamsCleanUpRunnerTest.java new file mode 100644 index 00000000..a80ae6cc --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/StreamsCleanUpRunnerTest.java @@ -0,0 +1,642 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka.integration; + + +import static com.bakdata.kafka.integration.StreamsRunnerTest.configureApp; +import static net.mguenther.kafka.junit.Wait.delay; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +import com.bakdata.kafka.CleanUpException; +import com.bakdata.kafka.CleanUpRunner; +import com.bakdata.kafka.ConfiguredStreamsApp; +import com.bakdata.kafka.EffectiveAppConfiguration; +import com.bakdata.kafka.ExecutableApp; +import com.bakdata.kafka.ExecutableStreamsApp; +import com.bakdata.kafka.HasTopicHooks.TopicHook; +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsCleanUpConfiguration; +import com.bakdata.kafka.StreamsCleanUpRunner; +import com.bakdata.kafka.StreamsRunner; +import com.bakdata.kafka.StreamsTopicConfig; +import com.bakdata.kafka.TestRecord; +import com.bakdata.kafka.test_applications.ComplexTopologyApplication; +import com.bakdata.kafka.test_applications.MirrorKeyWithAvro; +import com.bakdata.kafka.test_applications.MirrorValueWithAvro; +import com.bakdata.kafka.test_applications.WordCount; +import com.bakdata.kafka.test_applications.WordCountPattern; +import com.bakdata.kafka.util.ConsumerGroupClient; +import com.bakdata.kafka.util.ImprovedAdminClient; +import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; +import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; +import io.confluent.kafka.serializers.KafkaAvroSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import net.mguenther.kafka.junit.KeyValue; +import net.mguenther.kafka.junit.ReadKeyValues; +import net.mguenther.kafka.junit.SendKeyValuesTransactional; +import net.mguenther.kafka.junit.SendValuesTransactional; +import net.mguenther.kafka.junit.TopicConfig; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.LongDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.assertj.core.api.SoftAssertions; +import org.assertj.core.api.junit.jupiter.InjectSoftAssertions; +import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +@ExtendWith(SoftAssertionsExtension.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +class StreamsCleanUpRunnerTest extends KafkaTest { + private static final int TIMEOUT_SECONDS = 10; + @InjectSoftAssertions + private SoftAssertions softly; + @Mock + private TopicHook topicHook; + + private static ConfiguredStreamsApp createWordCountPatternApplication() { + return configureApp(new WordCountPattern(), StreamsTopicConfig.builder() + 
.inputPattern(Pattern.compile(".*_topic")) + .outputTopic("word_output") + .build()); + } + + private static ConfiguredStreamsApp createWordCountApplication() { + return configureApp(new WordCount(), StreamsTopicConfig.builder() + .inputTopics(List.of("word_input")) + .outputTopic("word_output") + .build()); + } + + private static ConfiguredStreamsApp createMirrorValueApplication() { + return configureApp(new MirrorValueWithAvro(), StreamsTopicConfig.builder() + .inputTopics(List.of("input")) + .outputTopic("output") + .build()); + } + + private static ConfiguredStreamsApp createMirrorKeyApplication() { + return configureApp(new MirrorKeyWithAvro(), StreamsTopicConfig.builder() + .inputTopics(List.of("input")) + .outputTopic("output") + .build()); + } + + private static void reset(final ExecutableApp app) { + app.createCleanUpRunner().reset(); + } + + private static void clean(final ExecutableApp app) { + app.createCleanUpRunner().clean(); + } + + private static void run(final ExecutableApp app) throws InterruptedException { + try (final StreamsRunner runner = app.createRunner()) { + StreamsRunnerTest.run(runner); + // Wait until stream application has consumed all data + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + } + + @Test + void shouldDeleteTopic() throws InterruptedException { + try (final ConfiguredStreamsApp app = createWordCountApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint( + this.createEndpointWithoutSchemaRegistry())) { + final SendValuesTransactional sendRequest = SendValuesTransactional + .inTransaction(app.getTopics().getInputTopics().get(0), List.of("blub", "bla", "blub")) + .useDefaults(); + this.kafkaCluster.send(sendRequest); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + final List> expectedValues = + List.of(new KeyValue<>("blub", 1L), + new KeyValue<>("bla", 1L), + new KeyValue<>("blub", 2L) + ); + + run(executableApp); + this.assertContent(app.getTopics().getOutputTopic(), expectedValues, + "WordCount contains all elements after first run"); + + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + clean(executableApp); + + this.softly.assertThat(this.kafkaCluster.exists(app.getTopics().getOutputTopic())) + .as("Output topic is deleted") + .isFalse(); + } + } + + @Test + void shouldDeleteConsumerGroup() throws InterruptedException { + try (final ConfiguredStreamsApp app = createWordCountApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint( + this.createEndpointWithoutSchemaRegistry())) { + final SendValuesTransactional sendRequest = SendValuesTransactional + .inTransaction(app.getTopics().getInputTopics().get(0), List.of("blub", "bla", "blub")) + .useDefaults(); + this.kafkaCluster.send(sendRequest); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + final List> expectedValues = + List.of(new KeyValue<>("blub", 1L), + new KeyValue<>("bla", 1L), + new KeyValue<>("blub", 2L) + ); + + run(executableApp); + this.assertContent(app.getTopics().getOutputTopic(), expectedValues, + "WordCount contains all elements after first run"); + + try (final ConsumerGroupClient adminClient = this.createAdminClient().getConsumerGroupClient()) { + this.softly.assertThat(adminClient.exists(app.getUniqueAppId())) + .as("Consumer group exists") + .isTrue(); + } + + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + clean(executableApp); + + try (final ConsumerGroupClient adminClient = this.createAdminClient().getConsumerGroupClient()) { + this.softly.assertThat(adminClient.exists(app.getUniqueAppId())) + .as("Consumer group is deleted") + .isFalse(); + 
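+                // clean() removes the consumer group together with the app's output topics, so it no longer exists here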
} + } + } + + @Test + void shouldNotThrowAnErrorIfConsumerGroupDoesNotExist() throws InterruptedException { + try (final ConfiguredStreamsApp app = createWordCountApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint( + this.createEndpointWithoutSchemaRegistry())) { + final SendValuesTransactional sendRequest = SendValuesTransactional + .inTransaction(app.getTopics().getInputTopics().get(0), List.of("blub", "bla", "blub")) + .useDefaults(); + this.kafkaCluster.send(sendRequest); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + final List> expectedValues = + List.of(new KeyValue<>("blub", 1L), + new KeyValue<>("bla", 1L), + new KeyValue<>("blub", 2L) + ); + + run(executableApp); + this.assertContent(app.getTopics().getOutputTopic(), expectedValues, + "WordCount contains all elements after first run"); + + try (final ConsumerGroupClient adminClient = this.createAdminClient().getConsumerGroupClient()) { + this.softly.assertThat(adminClient.exists(app.getUniqueAppId())) + .as("Consumer group exists") + .isTrue(); + } + + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + try (final ConsumerGroupClient adminClient = this.createAdminClient().getConsumerGroupClient()) { + adminClient.deleteConsumerGroup(app.getUniqueAppId()); + this.softly.assertThat(adminClient.exists(app.getUniqueAppId())) + .as("Consumer group is deleted") + .isFalse(); + } + this.softly.assertThatCode(() -> clean(executableApp)).doesNotThrowAnyException(); + } + } + + @Test + void shouldDeleteInternalTopics() throws InterruptedException { + try (final ConfiguredStreamsApp app = this.createComplexApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint(this.createEndpoint())) { + + final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build(); + final SendKeyValuesTransactional sendRequest = SendKeyValuesTransactional + .inTransaction(app.getTopics().getInputTopics().get(0), + Collections.singletonList(new KeyValue<>("key 1", testRecord))) + .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, + this.schemaRegistryMockExtension.getUrl()) + .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()) + .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName()) + .build(); + this.kafkaCluster.send(sendRequest); + + run(executableApp); + + final List inputTopics = app.getTopics().getInputTopics(); + final String uniqueAppId = app.getUniqueAppId(); + final String internalTopic = + uniqueAppId + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-repartition"; + final String backingTopic = + uniqueAppId + "-KSTREAM-REDUCE-STATE-STORE-0000000003-changelog"; + final String manualTopic = ComplexTopologyApplication.THROUGH_TOPIC; + + for (final String inputTopic : inputTopics) { + this.softly.assertThat(this.kafkaCluster.exists(inputTopic)).isTrue(); + } + this.softly.assertThat(this.kafkaCluster.exists(internalTopic)).isTrue(); + this.softly.assertThat(this.kafkaCluster.exists(backingTopic)).isTrue(); + this.softly.assertThat(this.kafkaCluster.exists(manualTopic)).isTrue(); + + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + reset(executableApp); + + for (final String inputTopic : inputTopics) { + this.softly.assertThat(this.kafkaCluster.exists(inputTopic)).isTrue(); + } + this.softly.assertThat(this.kafkaCluster.exists(internalTopic)).isFalse(); + this.softly.assertThat(this.kafkaCluster.exists(backingTopic)).isFalse(); + this.softly.assertThat(this.kafkaCluster.exists(manualTopic)).isTrue(); + } + } + + @Test + void 
shouldDeleteIntermediateTopics() throws InterruptedException { + try (final ConfiguredStreamsApp app = this.createComplexApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint(this.createEndpoint())) { + + final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build(); + final SendKeyValuesTransactional sendRequest = SendKeyValuesTransactional + .inTransaction(app.getTopics().getInputTopics().get(0), + Collections.singletonList(new KeyValue<>("key 1", testRecord))) + .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, + this.schemaRegistryMockExtension.getUrl()) + .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()) + .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName()) + .build(); + this.kafkaCluster.send(sendRequest); + + run(executableApp); + + final List inputTopics = app.getTopics().getInputTopics(); + final String manualTopic = ComplexTopologyApplication.THROUGH_TOPIC; + + for (final String inputTopic : inputTopics) { + this.softly.assertThat(this.kafkaCluster.exists(inputTopic)).isTrue(); + } + this.softly.assertThat(this.kafkaCluster.exists(manualTopic)).isTrue(); + + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + clean(executableApp); + + for (final String inputTopic : inputTopics) { + this.softly.assertThat(this.kafkaCluster.exists(inputTopic)).isTrue(); + } + this.softly.assertThat(this.kafkaCluster.exists(manualTopic)).isFalse(); + } + } + + @Test + void shouldDeleteState() throws InterruptedException { + try (final ConfiguredStreamsApp app = createWordCountApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint( + this.createEndpointWithoutSchemaRegistry())) { + final SendValuesTransactional sendRequest = SendValuesTransactional + .inTransaction(app.getTopics().getInputTopics().get(0), List.of("blub", "bla", "blub")) + .useDefaults(); + this.kafkaCluster.send(sendRequest); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + final List> expectedValues = + List.of(new KeyValue<>("blub", 1L), + new KeyValue<>("bla", 1L), + new KeyValue<>("blub", 2L) + ); + + run(executableApp); + this.assertContent(app.getTopics().getOutputTopic(), expectedValues, + "All entries are once in the input topic after the 1st run"); + + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + reset(executableApp); + + run(executableApp); + final List> entriesTwice = expectedValues.stream() + .flatMap(entry -> Stream.of(entry, entry)) + .collect(Collectors.toList()); + this.assertContent(app.getTopics().getOutputTopic(), entriesTwice, + "All entries are twice in the input topic after the 2nd run"); + } + } + + @Test + void shouldReprocessAlreadySeenRecords() throws InterruptedException { + try (final ConfiguredStreamsApp app = createWordCountApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint( + this.createEndpointWithoutSchemaRegistry())) { + final SendValuesTransactional sendRequest = SendValuesTransactional + .inTransaction(app.getTopics().getInputTopics().get(0), List.of("a", "b", "c")) + .useDefaults(); + this.kafkaCluster.send(sendRequest); + + run(executableApp); + this.assertSize(app.getTopics().getOutputTopic(), 3); + run(executableApp); + this.assertSize(app.getTopics().getOutputTopic(), 3); + + // Wait until all stream application are completely stopped before triggering cleanup + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + reset(executableApp); + + run(executableApp); + this.assertSize(app.getTopics().getOutputTopic(), 6); + } + } + + @Test + void 
shouldDeleteValueSchema() + throws InterruptedException, IOException, RestClientException { + try (final ConfiguredStreamsApp app = createMirrorValueApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint(this.createEndpoint()); + final SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient()) { + final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build(); + final String inputTopic = app.getTopics().getInputTopics().get(0); + final SendValuesTransactional sendRequest = SendValuesTransactional + .inTransaction(inputTopic, Collections.singletonList(testRecord)) + .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, + this.schemaRegistryMockExtension.getUrl()) + .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName()) + .build(); + this.kafkaCluster.send(sendRequest); + + run(executableApp); + + // Wait until all stream application are completely stopped before triggering cleanup + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + final String outputTopic = app.getTopics().getOutputTopic(); + this.softly.assertThat(client.getAllSubjects()) + .contains(outputTopic + "-value", inputTopic + "-value"); + clean(executableApp); + this.softly.assertThat(client.getAllSubjects()) + .doesNotContain(outputTopic + "-value") + .contains(inputTopic + "-value"); + } + } + + @Test + void shouldDeleteKeySchema() + throws InterruptedException, IOException, RestClientException { + try (final ConfiguredStreamsApp app = createMirrorKeyApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint(this.createEndpoint()); + final SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient()) { + final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build(); + final String inputTopic = app.getTopics().getInputTopics().get(0); + final SendKeyValuesTransactional sendRequest = SendKeyValuesTransactional + .inTransaction(inputTopic, Collections.singletonList(new KeyValue<>(testRecord, "val"))) + .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, + this.schemaRegistryMockExtension.getUrl()) + .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName()) + .build(); + this.kafkaCluster.send(sendRequest); + + run(executableApp); + + // Wait until all stream application are completely stopped before triggering cleanup + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + final String outputTopic = app.getTopics().getOutputTopic(); + this.softly.assertThat(client.getAllSubjects()) + .contains(outputTopic + "-key", inputTopic + "-key"); + clean(executableApp); + this.softly.assertThat(client.getAllSubjects()) + .doesNotContain(outputTopic + "-key") + .contains(inputTopic + "-key"); + } + } + + @Test + void shouldDeleteSchemaOfInternalTopics() + throws InterruptedException, IOException, RestClientException { + try (final ConfiguredStreamsApp app = this.createComplexApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint(this.createEndpoint()); + final SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient()) { + final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build(); + final String inputTopic = app.getTopics().getInputTopics().get(0); + final SendKeyValuesTransactional sendRequest = SendKeyValuesTransactional + .inTransaction(inputTopic, Collections.singletonList(new KeyValue<>("key 1", testRecord))) + 
.with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, + this.schemaRegistryMockExtension.getUrl()) + .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()) + .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName()) + .build(); + this.kafkaCluster.send(sendRequest); + + run(executableApp); + + // Wait until all stream application are completely stopped before triggering cleanup + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + final String inputSubject = inputTopic + "-value"; + final String uniqueAppId = app.getUniqueAppId(); + final String internalSubject = + uniqueAppId + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-repartition" + "-value"; + final String backingSubject = + uniqueAppId + "-KSTREAM-REDUCE-STATE-STORE-0000000003-changelog" + "-value"; + final String manualSubject = ComplexTopologyApplication.THROUGH_TOPIC + "-value"; + this.softly.assertThat(client.getAllSubjects()) + .contains(inputSubject, internalSubject, backingSubject, manualSubject); + reset(executableApp); + + this.softly.assertThat(client.getAllSubjects()) + .doesNotContain(internalSubject, backingSubject) + .contains(inputSubject, manualSubject); + } + } + + + @Test + void shouldDeleteSchemaOfIntermediateTopics() + throws InterruptedException, IOException, RestClientException { + try (final ConfiguredStreamsApp app = this.createComplexApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint(this.createEndpoint()); + final SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient()) { + final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build(); + final String inputTopic = app.getTopics().getInputTopics().get(0); + final SendKeyValuesTransactional sendRequest = SendKeyValuesTransactional + .inTransaction(inputTopic, Collections.singletonList(new KeyValue<>("key 1", testRecord))) + .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, + this.schemaRegistryMockExtension.getUrl()) + .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()) + .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName()) + .build(); + this.kafkaCluster.send(sendRequest); + + run(executableApp); + + // Wait until all stream application are completely stopped before triggering cleanup + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + final String inputSubject = inputTopic + "-value"; + final String manualSubject = ComplexTopologyApplication.THROUGH_TOPIC + "-value"; + this.softly.assertThat(client.getAllSubjects()) + .contains(inputSubject, manualSubject); + clean(executableApp); + + this.softly.assertThat(client.getAllSubjects()) + .doesNotContain(manualSubject) + .contains(inputSubject); + } + } + + @Test + void shouldCallCleanupHookForInternalTopics() { + try (final ConfiguredStreamsApp app = this.createComplexCleanUpHookApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint(this.createEndpoint())) { + reset(executableApp); + final String uniqueAppId = app.getUniqueAppId(); + verify(this.topicHook).deleted(uniqueAppId + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-repartition"); + verify(this.topicHook).deleted(uniqueAppId + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-changelog"); + verify(this.topicHook).deleted(uniqueAppId + "-KSTREAM-REDUCE-STATE-STORE-0000000003-changelog"); + verifyNoMoreInteractions(this.topicHook); + } + } + + @Test + void shouldCallCleanUpHookForAllTopics() { + try (final ConfiguredStreamsApp app = 
this.createComplexCleanUpHookApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint(this.createEndpoint())) { + clean(executableApp); + final String uniqueAppId = app.getUniqueAppId(); + verify(this.topicHook).deleted(uniqueAppId + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-repartition"); + verify(this.topicHook).deleted(uniqueAppId + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-changelog"); + verify(this.topicHook).deleted(uniqueAppId + "-KSTREAM-REDUCE-STATE-STORE-0000000003-changelog"); + verify(this.topicHook).deleted(ComplexTopologyApplication.THROUGH_TOPIC); + verify(this.topicHook).deleted(app.getTopics().getOutputTopic()); + verifyNoMoreInteractions(this.topicHook); + } + } + + @Test + void shouldNotThrowExceptionOnMissingInputTopic() throws InterruptedException { + try (final ConfiguredStreamsApp app = createMirrorKeyApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint(this.createEndpoint())) { + // if we don't run the app, the coordinator will be unavailable + run(executableApp); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + this.softly.assertThatCode(() -> clean(executableApp)).doesNotThrowAnyException(); + } + } + + @Test + void shouldThrowExceptionOnResetterError() throws InterruptedException { + try (final ConfiguredStreamsApp app = createMirrorKeyApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint( + this.createEndpoint()); + final StreamsRunner runner = executableApp.createRunner()) { + this.kafkaCluster.createTopic(TopicConfig.withName(app.getTopics().getInputTopics().get(0)).useDefaults()); + StreamsRunnerTest.run(runner); + // Wait until stream application has consumed all data + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + // should throw exception because consumer group is still active + this.softly.assertThatThrownBy(() -> reset(executableApp)) + .isInstanceOf(CleanUpException.class) + .hasMessageContaining("Error running streams resetter. 
Exit code 1"); + } + } + + @Test + void shouldReprocessAlreadySeenRecordsWithPattern() throws InterruptedException { + try (final ConfiguredStreamsApp app = createWordCountPatternApplication(); + final ExecutableStreamsApp executableApp = app.withEndpoint( + this.createEndpointWithoutSchemaRegistry())) { + this.kafkaCluster.send(SendValuesTransactional.inTransaction("input_topic", + Arrays.asList("a", "b")).useDefaults()); + this.kafkaCluster.send(SendValuesTransactional.inTransaction("another_topic", + List.of("c")).useDefaults()); + + run(executableApp); + this.assertSize(app.getTopics().getOutputTopic(), 3); + run(executableApp); + this.assertSize(app.getTopics().getOutputTopic(), 3); + + // Wait until all stream application are completely stopped before triggering cleanup + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + reset(executableApp); + + run(executableApp); + this.assertSize(app.getTopics().getOutputTopic(), 6); + } + } + + private ConfiguredStreamsApp createComplexApplication() { + this.kafkaCluster.createTopic(TopicConfig.withName(ComplexTopologyApplication.THROUGH_TOPIC).useDefaults()); + return configureApp(new ComplexTopologyApplication(), StreamsTopicConfig.builder() + .inputTopics(List.of("input")) + .outputTopic("output") + .build()); + } + + private ConfiguredStreamsApp createComplexCleanUpHookApplication() { + this.kafkaCluster.createTopic(TopicConfig.withName(ComplexTopologyApplication.THROUGH_TOPIC).useDefaults()); + return configureApp(new ComplexTopologyApplication() { + @Override + public StreamsCleanUpConfiguration setupCleanUp( + final EffectiveAppConfiguration configuration) { + return super.setupCleanUp(configuration) + .registerTopicHook(StreamsCleanUpRunnerTest.this.topicHook); + } + }, StreamsTopicConfig.builder() + .inputTopics(List.of("input")) + .outputTopic("output") + .build()); + } + + private ImprovedAdminClient createAdminClient() { + return ImprovedAdminClient.create(this.createEndpoint().createKafkaProperties()); + } + + private List> readOutputTopic(final String outputTopic) throws InterruptedException { + final ReadKeyValues readRequest = ReadKeyValues.from(outputTopic, Long.class) + .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class).build(); + return this.kafkaCluster.read(readRequest); + } + + private void assertContent(final String outputTopic, + final Iterable> expectedValues, final String description) + throws InterruptedException { + final List> output = this.readOutputTopic(outputTopic); + this.softly.assertThat(output) + .as(description) + .containsExactlyInAnyOrderElementsOf(expectedValues); + } + + private void assertSize(final String outputTopic, final int expectedMessageCount) throws InterruptedException { + final List> records = this.readOutputTopic(outputTopic); + this.softly.assertThat(records).hasSize(expectedMessageCount); + } + +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/StreamsRunnerTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/StreamsRunnerTest.java new file mode 100644 index 00000000..555290e1 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/integration/StreamsRunnerTest.java @@ -0,0 +1,254 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, 
copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package com.bakdata.kafka.integration; + +import static net.mguenther.kafka.junit.Wait.delay; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.bakdata.kafka.AppConfiguration; +import com.bakdata.kafka.ConfiguredStreamsApp; +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsExecutionOptions; +import com.bakdata.kafka.StreamsRunner; +import com.bakdata.kafka.StreamsTopicConfig; +import com.bakdata.kafka.TopologyBuilder; +import com.bakdata.kafka.test_applications.ExtraInputTopics; +import com.bakdata.kafka.test_applications.Mirror; +import java.lang.Thread.UncaughtExceptionHandler; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import lombok.Getter; +import net.mguenther.kafka.junit.KeyValue; +import net.mguenther.kafka.junit.ReadKeyValues; +import net.mguenther.kafka.junit.SendKeyValuesTransactional; +import net.mguenther.kafka.junit.TopicConfig; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.kafka.streams.KafkaStreams.State; +import org.apache.kafka.streams.KafkaStreams.StateListener; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.errors.MissingSourceTopicException; +import org.apache.kafka.streams.errors.StreamsException; +import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; +import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse; +import org.apache.kafka.streams.kstream.KStream; +import org.assertj.core.api.SoftAssertions; +import org.assertj.core.api.junit.jupiter.InjectSoftAssertions; +import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +@ExtendWith(SoftAssertionsExtension.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +class StreamsRunnerTest extends KafkaTest { + private static final int TIMEOUT_SECONDS = 10; + @Mock + private StreamsUncaughtExceptionHandler uncaughtExceptionHandler; + @Mock + private StateListener stateListener; + @InjectSoftAssertions + private SoftAssertions softly; + + static Thread run(final StreamsRunner runner) { + // run in Thread because the 
application blocks indefinitely + final Thread thread = new Thread(runner); + final UncaughtExceptionHandler handler = new CapturingUncaughtExceptionHandler(); + thread.setUncaughtExceptionHandler(handler); + thread.start(); + return thread; + } + + static ConfiguredStreamsApp configureApp(final StreamsApp app, final StreamsTopicConfig topics) { + final AppConfiguration configuration = new AppConfiguration<>(topics, Map.of( + StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, "0", + ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "10000" + )); + return new ConfiguredStreamsApp<>(app, configuration); + } + + private static ConfiguredStreamsApp createMirrorApplication() { + return configureApp(new Mirror(), StreamsTopicConfig.builder() + .inputTopics(List.of("input")) + .outputTopic("output") + .build()); + } + + private static ConfiguredStreamsApp createExtraInputTopicsApplication() { + return configureApp(new ExtraInputTopics(), StreamsTopicConfig.builder() + .extraInputTopics(Map.of("role", List.of("input1", "input2"))) + .outputTopic("output") + .build()); + } + + private static ConfiguredStreamsApp createErrorApplication() { + return configureApp(new ErrorApplication(), StreamsTopicConfig.builder() + .inputTopics(List.of("input")) + .outputTopic("output") + .build()); + } + + @Test + void shouldRunApp() throws InterruptedException { + try (final ConfiguredStreamsApp app = createMirrorApplication(); + final StreamsRunner runner = app.withEndpoint(this.createEndpointWithoutSchemaRegistry()) + .createRunner()) { + final String inputTopic = app.getTopics().getInputTopics().get(0); + this.kafkaCluster.createTopic(TopicConfig.withName(inputTopic).useDefaults()); + final String outputTopic = app.getTopics().getOutputTopic(); + this.kafkaCluster.createTopic(TopicConfig.withName(outputTopic).useDefaults()); + run(runner); + final SendKeyValuesTransactional kvSendKeyValuesTransactionalBuilder = + SendKeyValuesTransactional.inTransaction(inputTopic, List.of(new KeyValue<>("foo", "bar"))) + .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) + .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class) + .build(); + this.kafkaCluster.send(kvSendKeyValuesTransactionalBuilder); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + this.softly.assertThat(this.kafkaCluster.read(ReadKeyValues.from(outputTopic, String.class, String.class) + .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) + .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) + .build())) + .hasSize(1); + } + } + + @Test + void shouldUseMultipleExtraInputTopics() throws InterruptedException { + try (final ConfiguredStreamsApp app = createExtraInputTopicsApplication(); + final StreamsRunner runner = app.withEndpoint(this.createEndpointWithoutSchemaRegistry()) + .createRunner()) { + final List inputTopics = app.getTopics().getExtraInputTopics().get("role"); + final String inputTopic1 = inputTopics.get(0); + final String inputTopic2 = inputTopics.get(1); + final String outputTopic = app.getTopics().getOutputTopic(); + this.kafkaCluster.createTopic(TopicConfig.withName(inputTopic1).useDefaults()); + this.kafkaCluster.createTopic(TopicConfig.withName(inputTopic2).useDefaults()); + this.kafkaCluster.createTopic(TopicConfig.withName(outputTopic).useDefaults()); + run(runner); + this.kafkaCluster.send( + SendKeyValuesTransactional.inTransaction(inputTopic1, List.of(new KeyValue<>("foo", "bar"))) + .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) 
+ .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class) + .build()); + this.kafkaCluster.send( + SendKeyValuesTransactional.inTransaction(inputTopic2, List.of(new KeyValue<>("foo", "baz"))) + .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) + .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class) + .build()); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + this.softly.assertThat(this.kafkaCluster.read(ReadKeyValues.from(outputTopic, String.class, String.class) + .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) + .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) + .build())) + .hasSize(2); + } + } + + @Test + void shouldThrowOnMissingInputTopic() throws InterruptedException { + when(this.uncaughtExceptionHandler.handle(any())).thenReturn(StreamThreadExceptionResponse.SHUTDOWN_CLIENT); + try (final ConfiguredStreamsApp app = createMirrorApplication(); + final StreamsRunner runner = app.withEndpoint(this.createEndpointWithoutSchemaRegistry()) + .createRunner(StreamsExecutionOptions.builder() + .stateListener(() -> this.stateListener) + .uncaughtExceptionHandler(() -> this.uncaughtExceptionHandler) + .build())) { + final Thread thread = run(runner); + final CapturingUncaughtExceptionHandler handler = + (CapturingUncaughtExceptionHandler) thread.getUncaughtExceptionHandler(); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + this.softly.assertThat(thread.isAlive()).isFalse(); + this.softly.assertThat(handler.getLastException()).isInstanceOf(MissingSourceTopicException.class); + verify(this.uncaughtExceptionHandler).handle(any()); + verify(this.stateListener).onChange(State.ERROR, State.PENDING_ERROR); + } + } + + @Test + void shouldCloseOnMapError() throws InterruptedException { + when(this.uncaughtExceptionHandler.handle(any())).thenReturn(StreamThreadExceptionResponse.SHUTDOWN_CLIENT); + try (final ConfiguredStreamsApp app = createErrorApplication(); + final StreamsRunner runner = app.withEndpoint(this.createEndpointWithoutSchemaRegistry()) + .createRunner(StreamsExecutionOptions.builder() + .stateListener(() -> this.stateListener) + .uncaughtExceptionHandler(() -> this.uncaughtExceptionHandler) + .build())) { + final String inputTopic = app.getTopics().getInputTopics().get(0); + this.kafkaCluster.createTopic(TopicConfig.withName(inputTopic).useDefaults()); + final String outputTopic = app.getTopics().getOutputTopic(); + this.kafkaCluster.createTopic(TopicConfig.withName(outputTopic).useDefaults()); + final Thread thread = run(runner); + final CapturingUncaughtExceptionHandler handler = + (CapturingUncaughtExceptionHandler) thread.getUncaughtExceptionHandler(); + final SendKeyValuesTransactional kvSendKeyValuesTransactionalBuilder = + SendKeyValuesTransactional.inTransaction(inputTopic, List.of(new KeyValue<>("foo", "bar"))) + .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) + .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class) + .build(); + this.kafkaCluster.send(kvSendKeyValuesTransactionalBuilder); + delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); + this.softly.assertThat(thread.isAlive()).isFalse(); + this.softly.assertThat(handler.getLastException()).isInstanceOf(StreamsException.class) + .satisfies(e -> this.softly.assertThat(e.getCause()).hasMessage("Error in map")); + verify(this.uncaughtExceptionHandler).handle(any()); + verify(this.stateListener).onChange(State.ERROR, State.PENDING_ERROR); + } + } + + @Getter + private 
static class CapturingUncaughtExceptionHandler implements UncaughtExceptionHandler { + private Throwable lastException; + + @Override + public void uncaughtException(final Thread t, final Throwable e) { + this.lastException = e; + } + } + + private static class ErrorApplication implements StreamsApp { + + @Override + public void buildTopology(final TopologyBuilder builder) { + final KStream input = builder.streamInput(); + input.map((k, v) -> {throw new RuntimeException("Error in map");}) + .to(builder.getTopics().getOutputTopic()); + } + + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); + } + } +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/AvroKeyProducer.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/AvroKeyProducer.java new file mode 100644 index 00000000..5848accc --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/AvroKeyProducer.java @@ -0,0 +1,54 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka.test_applications; + +import com.bakdata.kafka.ProducerApp; +import com.bakdata.kafka.ProducerBuilder; +import com.bakdata.kafka.ProducerRunnable; +import com.bakdata.kafka.TestRecord; +import java.util.Map; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.StringSerializer; + +public class AvroKeyProducer implements ProducerApp { + @Override + public ProducerRunnable buildRunnable(final ProducerBuilder builder) { + return () -> { + try (final Producer producer = builder.createProducer()) { + producer.send(new ProducerRecord<>(builder.getTopics().getOutputTopic(), + TestRecord.newBuilder().setContent("key").build(), "value")); + } + }; + } + + @Override + public Map createKafkaProperties() { + return Map.of( + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class + ); + } +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/AvroValueProducer.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/AvroValueProducer.java new file mode 100644 index 00000000..64356ce2 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/AvroValueProducer.java @@ -0,0 +1,54 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka.test_applications; + +import com.bakdata.kafka.ProducerApp; +import com.bakdata.kafka.ProducerBuilder; +import com.bakdata.kafka.ProducerRunnable; +import com.bakdata.kafka.TestRecord; +import java.util.Map; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.StringSerializer; + +public class AvroValueProducer implements ProducerApp { + @Override + public ProducerRunnable buildRunnable(final ProducerBuilder builder) { + return () -> { + try (final Producer producer = builder.createProducer()) { + producer.send(new ProducerRecord<>(builder.getTopics().getOutputTopic(), "key", + TestRecord.newBuilder().setContent("value").build())); + } + }; + } + + @Override + public Map createKafkaProperties() { + return Map.of( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class + ); + } +} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/ComplexTopologyApplication.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/ComplexTopologyApplication.java similarity index 71% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/ComplexTopologyApplication.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/ComplexTopologyApplication.java index ef34f1e1..d321e3ca 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/ComplexTopologyApplication.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/ComplexTopologyApplication.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,15 +24,16 @@ package com.bakdata.kafka.test_applications; -import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; import com.bakdata.kafka.TestRecord; +import com.bakdata.kafka.TopologyBuilder; import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; import java.time.Duration; -import java.util.Properties; +import java.util.Map; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.serialization.Serdes.StringSerde; import org.apache.kafka.streams.KeyValue; -import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.KTable; @@ -41,16 +42,16 @@ import org.apache.kafka.streams.kstream.TimeWindows; import org.apache.kafka.streams.kstream.Windowed; -public class ComplexTopologyApplication extends KafkaStreamsApplication { +public class ComplexTopologyApplication implements StreamsApp { public static final String THROUGH_TOPIC = "through-topic"; @Override - public void buildTopology(final StreamsBuilder builder) { - final KStream input = builder.stream(this.getInputTopics()); + public void buildTopology(final TopologyBuilder builder) { + final KStream input = builder.streamInput(); input.to(THROUGH_TOPIC); - final KStream through = builder.stream(THROUGH_TOPIC); + final KStream through = builder.getStreamsBuilder().stream(THROUGH_TOPIC); final KTable, TestRecord> reduce = through .groupByKey() 
.windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofMillis(5L))) @@ -61,19 +62,19 @@ public void buildTopology(final StreamsBuilder builder) { .groupByKey() .count(Materialized.with(Serdes.String(), Serdes.Long())) .toStream() - .to(this.getOutputTopic(), Produced.with(Serdes.String(), Serdes.Long())); + .to(builder.getTopics().getOutputTopic(), Produced.with(Serdes.String(), Serdes.Long())); } @Override - public String getUniqueAppId() { - return this.getClass().getSimpleName() + "-" + this.getOutputTopic(); + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); } @Override - public Properties createKafkaProperties() { - final Properties kafkaConfig = super.createKafkaProperties(); - kafkaConfig.setProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class.getName()); - kafkaConfig.setProperty(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class.getName()); - return kafkaConfig; + public Map createKafkaProperties() { + return Map.of( + StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class, + StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class + ); } } diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/ExtraInputTopics.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/ExtraInputTopics.java new file mode 100644 index 00000000..88c7d0e3 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/ExtraInputTopics.java @@ -0,0 +1,45 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka.test_applications; + +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; +import com.bakdata.kafka.TopologyBuilder; +import lombok.NoArgsConstructor; +import org.apache.kafka.streams.kstream.KStream; + +@NoArgsConstructor +public class ExtraInputTopics implements StreamsApp { + @Override + public void buildTopology(final TopologyBuilder builder) { + final KStream input = builder.streamInput("role"); + input.to(builder.getTopics().getOutputTopic()); + } + + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); + } +} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/ExtraInputTopics.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/Mirror.java similarity index 70% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/ExtraInputTopics.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/Mirror.java index 6a9e88a0..212c7611 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/ExtraInputTopics.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/Mirror.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,21 +24,23 @@ package com.bakdata.kafka.test_applications; -import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; +import com.bakdata.kafka.TopologyBuilder; import lombok.NoArgsConstructor; -import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.kstream.KStream; @NoArgsConstructor -public class ExtraInputTopics extends KafkaStreamsApplication { +public class Mirror implements StreamsApp { @Override - public void buildTopology(final StreamsBuilder builder) { - final KStream input = builder.stream(this.getInputTopics("role")); - input.to(this.getOutputTopic()); + public void buildTopology(final TopologyBuilder builder) { + final KStream input = builder.streamInput(); + input.to(builder.getTopics().getOutputTopic()); } @Override - public String getUniqueAppId() { - return this.getClass().getSimpleName() + "-" + this.getOutputTopic(); + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); } + } diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/MirrorKeyWithAvro.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/MirrorKeyWithAvro.java similarity index 65% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/MirrorKeyWithAvro.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/MirrorKeyWithAvro.java index edf23655..67056d93 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/MirrorKeyWithAvro.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/MirrorKeyWithAvro.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated 
documentation files (the "Software"), to deal @@ -24,34 +24,35 @@ package com.bakdata.kafka.test_applications; -import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; import com.bakdata.kafka.TestRecord; +import com.bakdata.kafka.TopologyBuilder; import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; -import java.util.Properties; +import java.util.Map; import lombok.NoArgsConstructor; import org.apache.kafka.common.serialization.Serdes.StringSerde; -import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.kstream.KStream; @NoArgsConstructor -public class MirrorKeyWithAvro extends KafkaStreamsApplication { +public class MirrorKeyWithAvro implements StreamsApp { @Override - public void buildTopology(final StreamsBuilder builder) { - final KStream input = builder.stream(this.getInputTopics()); - input.to(this.getOutputTopic()); + public void buildTopology(final TopologyBuilder builder) { + final KStream input = builder.streamInput(); + input.to(builder.getTopics().getOutputTopic()); } @Override - public String getUniqueAppId() { - return this.getClass().getSimpleName() + "-" + this.getOutputTopic(); + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); } @Override - public Properties createKafkaProperties() { - final Properties kafkaConfig = super.createKafkaProperties(); - kafkaConfig.setProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, SpecificAvroSerde.class.getName()); - kafkaConfig.setProperty(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class.getName()); - return kafkaConfig; + public Map createKafkaProperties() { + return Map.of( + StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, SpecificAvroSerde.class, + StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class + ); } } diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/MirrorValueWithAvro.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/MirrorValueWithAvro.java similarity index 65% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/MirrorValueWithAvro.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/MirrorValueWithAvro.java index e2dc3a68..a2ee1aa8 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/MirrorValueWithAvro.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/MirrorValueWithAvro.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,34 +24,35 @@ package com.bakdata.kafka.test_applications; -import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; import com.bakdata.kafka.TestRecord; +import com.bakdata.kafka.TopologyBuilder; import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; -import java.util.Properties; +import java.util.Map; import lombok.NoArgsConstructor; import org.apache.kafka.common.serialization.Serdes.StringSerde; -import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.kstream.KStream; 
@NoArgsConstructor -public class MirrorValueWithAvro extends KafkaStreamsApplication { +public class MirrorValueWithAvro implements StreamsApp { @Override - public void buildTopology(final StreamsBuilder builder) { - final KStream input = builder.stream(this.getInputTopics()); - input.to(this.getOutputTopic()); + public void buildTopology(final TopologyBuilder builder) { + final KStream input = builder.streamInput(); + input.to(builder.getTopics().getOutputTopic()); } @Override - public String getUniqueAppId() { - return this.getClass().getSimpleName() + "-" + this.getOutputTopic(); + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); } @Override - public Properties createKafkaProperties() { - final Properties kafkaConfig = super.createKafkaProperties(); - kafkaConfig.setProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class.getName()); - kafkaConfig.setProperty(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class.getName()); - return kafkaConfig; + public Map createKafkaProperties() { + return Map.of( + StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class, + StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class + ); } } diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/MirrorWithNonDefaultSerde.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/MirrorWithNonDefaultSerde.java similarity index 54% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/MirrorWithNonDefaultSerde.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/MirrorWithNonDefaultSerde.java index 23d2adcc..846e851f 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/MirrorWithNonDefaultSerde.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/MirrorWithNonDefaultSerde.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,45 +24,52 @@ package com.bakdata.kafka.test_applications; -import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.Configurator; +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; import com.bakdata.kafka.TestRecord; +import com.bakdata.kafka.TopologyBuilder; import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; -import java.util.Properties; +import java.util.Map; import lombok.NoArgsConstructor; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes.StringSerde; -import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.kstream.Consumed; import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.Produced; @NoArgsConstructor -public class MirrorWithNonDefaultSerde extends KafkaStreamsApplication { - @Override - public void buildTopology(final StreamsBuilder builder) { - final Serde valueSerde = this.getValueSerde(); - final KStream input = - builder.stream(this.getInputTopics(), Consumed.with(null, valueSerde)); - input.to(this.getOutputTopic(), Produced.valueSerde(valueSerde)); +public class MirrorWithNonDefaultSerde implements StreamsApp { + + public 
static Serde newKeySerde() { + return new SpecificAvroSerde<>(); } - public Serde getValueSerde() { - final Serde valueSerde = new SpecificAvroSerde<>(); - valueSerde.configure(new StreamsConfig(this.getKafkaProperties()).originals(), false); - return valueSerde; + public static Serde newValueSerde() { + return new SpecificAvroSerde<>(); + } + + @Override + public void buildTopology(final TopologyBuilder builder) { + final Configurator configurator = builder.createConfigurator(); + final Serde valueSerde = configurator.configureForValues(newValueSerde()); + final Serde keySerde = configurator.configureForKeys(newKeySerde()); + final KStream input = + builder.streamInput(Consumed.with(keySerde, valueSerde)); + input.to(builder.getTopics().getOutputTopic(), Produced.with(keySerde, valueSerde)); } @Override - public String getUniqueAppId() { - return this.getClass().getSimpleName() + "-" + this.getOutputTopic(); + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); } @Override - public Properties createKafkaProperties() { - final Properties kafkaConfig = super.createKafkaProperties(); - kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); - kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); - return kafkaConfig; + public Map createKafkaProperties() { + return Map.of( + StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class, + StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class + ); } } diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/StringProducer.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/StringProducer.java new file mode 100644 index 00000000..10de4059 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/StringProducer.java @@ -0,0 +1,42 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka.test_applications; + +import com.bakdata.kafka.ProducerApp; +import com.bakdata.kafka.ProducerBuilder; +import com.bakdata.kafka.ProducerRunnable; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerRecord; + +public class StringProducer implements ProducerApp { + @Override + public ProducerRunnable buildRunnable(final ProducerBuilder builder) { + return () -> { + try (final Producer producer = builder.createProducer()) { + producer.send(new ProducerRecord<>(builder.getTopics().getOutputTopic(), "foo", "bar")); + } + }; + } +} diff --git a/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/WordCount.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/WordCount.java new file mode 100644 index 00000000..2bcdc095 --- /dev/null +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/WordCount.java @@ -0,0 +1,59 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka.test_applications; + +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; +import com.bakdata.kafka.TopologyBuilder; +import java.util.Arrays; +import java.util.regex.Pattern; +import lombok.NoArgsConstructor; +import org.apache.kafka.common.serialization.Serdes; +import org.apache.kafka.streams.kstream.KStream; +import org.apache.kafka.streams.kstream.KTable; +import org.apache.kafka.streams.kstream.Materialized; +import org.apache.kafka.streams.kstream.Produced; + +@NoArgsConstructor +public class WordCount implements StreamsApp { + + @Override + public void buildTopology(final TopologyBuilder builder) { + final KStream textLines = builder.streamInput(); + + final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); + final KTable wordCounts = textLines + .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))) + .groupBy((key, word) -> word) + .count(Materialized.as("counts")); + + wordCounts.toStream().to(builder.getTopics().getOutputTopic(), Produced.valueSerde(Serdes.Long())); + } + + @Override + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); + } +} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/WordCountPattern.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/WordCountPattern.java similarity index 77% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/WordCountPattern.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/WordCountPattern.java index 23357935..9c86b0b8 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/test_applications/WordCountPattern.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/test_applications/WordCountPattern.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,27 +24,25 @@ package com.bakdata.kafka.test_applications; -import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; +import com.bakdata.kafka.TopologyBuilder; import java.util.Arrays; import java.util.regex.Pattern; import lombok.NoArgsConstructor; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; -import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.KTable; import org.apache.kafka.streams.kstream.Materialized; import org.apache.kafka.streams.kstream.Produced; @NoArgsConstructor -public class WordCountPattern extends KafkaStreamsApplication { - public static void main(final String[] args) { - startApplication(new WordCountPattern(), args); - } +public class WordCountPattern implements StreamsApp { @Override - public void buildTopology(final StreamsBuilder builder) { - final KStream textLines = builder.stream(this.getInputPattern()); + public void buildTopology(final TopologyBuilder builder) { + final KStream textLines = builder.streamInputPattern(); final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); final KTable wordCounts = textLines @@ -53,11 +51,11 @@ public void buildTopology(final 
StreamsBuilder builder) { .count(Materialized.as("counts")); final Serde longValueSerde = Serdes.Long(); - wordCounts.toStream().to(this.outputTopic, Produced.valueSerde(longValueSerde)); + wordCounts.toStream().to(builder.getTopics().getOutputTopic(), Produced.valueSerde(longValueSerde)); } @Override - public String getUniqueAppId() { - return this.getClass().getSimpleName() + "-" + this.getOutputTopic(); + public String getUniqueAppId(final StreamsTopicConfig topics) { + return this.getClass().getSimpleName() + "-" + topics.getOutputTopic(); } } diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/util/SchemaTopicClientTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/util/SchemaTopicClientTest.java similarity index 92% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/util/SchemaTopicClientTest.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/util/SchemaTopicClientTest.java index ec84ea6e..536854dd 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/util/SchemaTopicClientTest.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/util/SchemaTopicClientTest.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -25,8 +25,7 @@ package com.bakdata.kafka.util; -import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith; -import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig; +import static com.bakdata.kafka.TestUtil.newKafkaCluster; import static net.mguenther.kafka.junit.Wait.delay; import com.bakdata.kafka.TestRecord; @@ -39,7 +38,7 @@ import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.List; -import java.util.Properties; +import java.util.Map; import java.util.concurrent.TimeUnit; import lombok.extern.slf4j.Slf4j; import net.mguenther.kafka.junit.EmbeddedKafkaCluster; @@ -63,7 +62,7 @@ class SchemaTopicClientTest { private static final String TOPIC = "topic"; @RegisterExtension final SchemaRegistryMockExtension schemaRegistryMockExtension = new SchemaRegistryMockExtension(); - private final EmbeddedKafkaCluster kafkaCluster = provisionWith(defaultClusterConfig()); + private final EmbeddedKafkaCluster kafkaCluster = newKafkaCluster(); @InjectSoftAssertions SoftAssertions softly; @@ -173,15 +172,17 @@ void shouldDeleteTopicAndKeepSchemaWhenSchemaRegistryUrlIsNotSet() throws Interr } private SchemaTopicClient createClientWithSchemaRegistry() { - final Properties kafkaProperties = new Properties(); - kafkaProperties.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaCluster.getBrokerList()); + final Map kafkaProperties = Map.of( + AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaCluster.getBrokerList() + ); return SchemaTopicClient.create(kafkaProperties, this.schemaRegistryMockExtension.getUrl(), Duration.of(TIMEOUT_SECONDS, ChronoUnit.SECONDS)); } private SchemaTopicClient createClientWithNoSchemaRegistry() { - final Properties kafkaProperties = new Properties(); - kafkaProperties.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaCluster.getBrokerList()); + final Map kafkaProperties = Map.of( + AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaCluster.getBrokerList() + ); return SchemaTopicClient.create(kafkaProperties, Duration.of(TIMEOUT_SECONDS, ChronoUnit.SECONDS)); } diff --git 
a/streams-bootstrap/src/test/java/com/bakdata/kafka/util/TopicClientTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/util/TopicClientTest.java similarity index 87% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/util/TopicClientTest.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/util/TopicClientTest.java index 2ca392cd..80c762a4 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/util/TopicClientTest.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/util/TopicClientTest.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,8 +24,10 @@ package com.bakdata.kafka.util; +import static com.bakdata.kafka.TestUtil.newKafkaConfig; import static java.util.Collections.emptyMap; import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith; +import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.newClusterConfig; import static net.mguenther.kafka.junit.Wait.delay; import static org.assertj.core.api.Assertions.assertThat; @@ -33,8 +35,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import net.mguenther.kafka.junit.EmbeddedKafkaCluster; -import net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig; -import net.mguenther.kafka.junit.EmbeddedKafkaConfig; import net.mguenther.kafka.junit.TopicConfig; import org.apache.kafka.clients.admin.AdminClientConfig; import org.junit.jupiter.api.AfterEach; @@ -44,17 +44,11 @@ class TopicClientTest { private static final Duration CLIENT_TIMEOUT = Duration.ofSeconds(10L); - private final EmbeddedKafkaCluster kafkaCluster = createKafkaCluster(); - - private static EmbeddedKafkaCluster createKafkaCluster() { - final EmbeddedKafkaConfig kafkaConfig = EmbeddedKafkaConfig.brokers() - .withNumberOfBrokers(2) - .build(); - final EmbeddedKafkaClusterConfig clusterConfig = EmbeddedKafkaClusterConfig.newClusterConfig() - .configure(kafkaConfig) - .build(); - return provisionWith(clusterConfig); - } + private final EmbeddedKafkaCluster kafkaCluster = provisionWith(newClusterConfig() + .configure(newKafkaConfig() + .withNumberOfBrokers(2) + .build()) + .build()); @BeforeEach void setup() throws InterruptedException { diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/util/TopologyInformationTest.java b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/util/TopologyInformationTest.java similarity index 85% rename from streams-bootstrap/src/test/java/com/bakdata/kafka/util/TopologyInformationTest.java rename to streams-bootstrap-core/src/test/java/com/bakdata/kafka/util/TopologyInformationTest.java index 6ea870a3..9951285d 100644 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/util/TopologyInformationTest.java +++ b/streams-bootstrap-core/src/test/java/com/bakdata/kafka/util/TopologyInformationTest.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -26,9 +26,14 @@ import static org.assertj.core.api.Assertions.assertThat; -import com.bakdata.kafka.KafkaStreamsApplication; +import com.bakdata.kafka.AppConfiguration; +import com.bakdata.kafka.ConfiguredStreamsApp; +import com.bakdata.kafka.KafkaEndpointConfig; 
+import com.bakdata.kafka.StreamsApp; +import com.bakdata.kafka.StreamsTopicConfig; import com.bakdata.kafka.test_applications.ComplexTopologyApplication; import java.util.List; +import java.util.Map; import java.util.regex.Pattern; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.kstream.KStream; @@ -41,29 +46,40 @@ class TopologyInformationTest { - private KafkaStreamsApplication app = null; + private StreamsApp app = null; + private StreamsTopicConfig topics; private TopologyInformation topologyInformation = null; @BeforeEach void setup() { this.app = new ComplexTopologyApplication(); - this.app.setInputTopics(List.of("input", "input2")); - this.app.setOutputTopic("output"); - this.topologyInformation = new TopologyInformation(this.app.createTopology(), this.app.getUniqueAppId()); + this.topics = StreamsTopicConfig.builder() + .inputTopics(List.of("input", "input2")) + .outputTopic("output") + .build(); + final AppConfiguration configuration = new AppConfiguration<>(this.topics); + final ConfiguredStreamsApp configuredApp = new ConfiguredStreamsApp<>(this.app, configuration); + final Map kafkaProperties = configuredApp.getKafkaProperties( + KafkaEndpointConfig.builder() + .brokers("localhost:9092") + .build()); + this.topologyInformation = + new TopologyInformation(configuredApp.createTopology(kafkaProperties), + this.app.getUniqueAppId(this.topics)); } @Test void shouldReturnAllExternalSinkTopics() { assertThat(this.topologyInformation.getExternalSinkTopics()) .containsExactly(ComplexTopologyApplication.THROUGH_TOPIC, - this.app.getOutputTopic()); + this.topics.getOutputTopic()); } @Test void shouldReturnAllExternalSourceTopics() { assertThat(this.topologyInformation.getExternalSourceTopics(List.of())) .hasSize(2) - .containsAll(this.app.getInputTopics()) + .containsAll(this.topics.getInputTopics()) .doesNotContain(ComplexTopologyApplication.THROUGH_TOPIC); } @@ -72,7 +88,7 @@ void shouldReturnAllIntermediateTopics() { assertThat(this.topologyInformation.getIntermediateTopics(List.of())) .hasSize(1) .containsExactly(ComplexTopologyApplication.THROUGH_TOPIC) - .doesNotContainAnyElementsOf(this.app.getInputTopics()); + .doesNotContainAnyElementsOf(this.topics.getInputTopics()); } @Test @@ -124,14 +140,14 @@ void shouldNotReturnFakeRepartitionTopics() { @Test void shouldNotReturnInputTopics() { assertThat(this.topologyInformation.getExternalSinkTopics()) - .doesNotContainAnyElementsOf(this.app.getInputTopics()); + .doesNotContainAnyElementsOf(this.topics.getInputTopics()); } @Test void shouldReturnAllInternalTopics() { assertThat(this.topologyInformation.getInternalTopics()) .hasSize(3) - .allMatch(topic -> topic.contains("-KSTREAM-") && topic.startsWith(this.app.getUniqueAppId()) + .allMatch(topic -> topic.contains("-KSTREAM-") && topic.startsWith(this.app.getUniqueAppId(this.topics)) || topic.startsWith("KSTREAM-")) .allMatch(topic -> topic.endsWith("-changelog") || topic.endsWith("-repartition")); } diff --git a/streams-bootstrap-core/src/test/resources/log4j2.xml b/streams-bootstrap-core/src/test/resources/log4j2.xml new file mode 100644 index 00000000..0d4071ce --- /dev/null +++ b/streams-bootstrap-core/src/test/resources/log4j2.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/streams-bootstrap-large-messages/build.gradle.kts b/streams-bootstrap-large-messages/build.gradle.kts index 0bf48f77..8b75b2a2 100644 --- a/streams-bootstrap-large-messages/build.gradle.kts +++ 
b/streams-bootstrap-large-messages/build.gradle.kts @@ -1,6 +1,6 @@ description = "Utils for using Large Message SerDe with your Kafka Streams Application" dependencies { - api(project(":streams-bootstrap")) + api(project(":streams-bootstrap-core")) implementation(group = "com.bakdata.kafka", name = "large-message-core", version = "2.6.0") } diff --git a/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageKafkaApplicationUtils.java b/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageKafkaApplicationUtils.java index 4a0617b6..763971c6 100644 --- a/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageKafkaApplicationUtils.java +++ b/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageKafkaApplicationUtils.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -24,38 +24,45 @@ package com.bakdata.kafka; -import java.util.function.Consumer; +import com.bakdata.kafka.HasTopicHooks.TopicHook; +import java.util.Map; import lombok.experimental.UtilityClass; /** - * Utility class that provides helpers for using {@code LargeMessageSerde} with {@link KafkaApplication} + * Utility class that provides helpers for cleaning {@code LargeMessageSerde} artifacts */ @UtilityClass public class LargeMessageKafkaApplicationUtils { /** * Create a hook that cleans up LargeMessage files associated with a topic. It is expected that all necessary - * properties to create a {@link AbstractLargeMessageConfig} are part of - * {@link KafkaApplication#getKafkaProperties()}. + * properties to create a {@link AbstractLargeMessageConfig} are part of {@code kafkaProperties}. * - * @param app {@code KafkaApplication} to create hook from + * @param kafkaProperties Kafka properties to create hook from * @return hook that cleans up LargeMessage files associated with a topic - * @see CleanUpRunner#registerTopicCleanUpHook(Consumer) + * @see HasTopicHooks#registerTopicHook(TopicHook) */ - public static Consumer createLargeMessageCleanUpHook(final KafkaApplication app) { - final AbstractLargeMessageConfig largeMessageConfig = new AbstractLargeMessageConfig(app.getKafkaProperties()); + public static TopicHook createLargeMessageCleanUpHook(final Map kafkaProperties) { + final AbstractLargeMessageConfig largeMessageConfig = new AbstractLargeMessageConfig(kafkaProperties); final LargeMessageStoringClient storer = largeMessageConfig.getStorer(); - return storer::deleteAllFiles; + return new TopicHook() { + @Override + public void deleted(final String topic) { + storer.deleteAllFiles(topic); + } + }; } /** - * Register a hook that cleans up LargeMessage files associated with a topic. + * Create a hook that cleans up LargeMessage files associated with a topic. It is expected that all necessary + * properties to create a {@link AbstractLargeMessageConfig} are part of + * {@link EffectiveAppConfiguration#getKafkaProperties()}. 
* - * @param app {@code KafkaApplication} to create hook from - * @param cleanUpRunner {@code CleanUpRunner} to register hook on - * @see #createLargeMessageCleanUpHook(KafkaApplication) + * @param configuration Configuration to create hook from + * @return hook that cleans up LargeMessage files associated with a topic + * @see #createLargeMessageCleanUpHook(Map) */ - public static void registerLargeMessageCleanUpHook(final KafkaApplication app, final CleanUpRunner cleanUpRunner) { - final Consumer deleteAllFiles = createLargeMessageCleanUpHook(app); - cleanUpRunner.registerTopicCleanUpHook(deleteAllFiles); + public static TopicHook createLargeMessageCleanUpHook(final EffectiveAppConfiguration configuration) { + return createLargeMessageCleanUpHook(configuration.getKafkaProperties()); } + } diff --git a/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageProducerApp.java b/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageProducerApp.java new file mode 100644 index 00000000..acd2c2a0 --- /dev/null +++ b/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageProducerApp.java @@ -0,0 +1,52 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +/** + * {@link ProducerApp} that automatically removes files associated with {@code LargeMessageSerializer} + */ +public interface LargeMessageProducerApp extends ProducerApp { + + /** + * Register a hook that cleans up LargeMessage files associated with a topic + * @param cleanUpConfiguration Configuration to register hook on + * @param configuration Configuration to create hook from + * @return {@code ProducerCleanUpConfiguration} with registered topic hook + * @see LargeMessageKafkaApplicationUtils#createLargeMessageCleanUpHook(EffectiveAppConfiguration) + */ + static ProducerCleanUpConfiguration registerLargeMessageCleanUpHook( + final ProducerCleanUpConfiguration cleanUpConfiguration, final EffectiveAppConfiguration configuration) { + return cleanUpConfiguration.registerTopicHook( + LargeMessageKafkaApplicationUtils.createLargeMessageCleanUpHook(configuration)); + } + + @Override + default ProducerCleanUpConfiguration setupCleanUp( + final EffectiveAppConfiguration configuration) { + final ProducerCleanUpConfiguration cleanUpConfiguration = ProducerApp.super.setupCleanUp(configuration); + return registerLargeMessageCleanUpHook(cleanUpConfiguration, configuration); + } + +} diff --git a/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageStreamsApp.java b/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageStreamsApp.java new file mode 100644 index 00000000..585e87c2 --- /dev/null +++ b/streams-bootstrap-large-messages/src/main/java/com/bakdata/kafka/LargeMessageStreamsApp.java @@ -0,0 +1,52 @@ +/* + * MIT License + * + * Copyright (c) 2024 bakdata + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package com.bakdata.kafka; + +/** + * {@link StreamsApp} that automatically removes files associated with {@code LargeMessageSerde} + */ +public interface LargeMessageStreamsApp extends StreamsApp { + + /** + * Register a hook that cleans up LargeMessage files associated with a topic + * @param cleanUpConfiguration Configuration to register hook on + * @param configuration Configuration to create hook from + * @return {@code StreamsCleanUpConfiguration} with registered topic hook + * @see LargeMessageKafkaApplicationUtils#createLargeMessageCleanUpHook(EffectiveAppConfiguration) + */ + static StreamsCleanUpConfiguration registerLargeMessageCleanUpHook( + final StreamsCleanUpConfiguration cleanUpConfiguration, final EffectiveAppConfiguration configuration) { + return cleanUpConfiguration.registerTopicHook( + LargeMessageKafkaApplicationUtils.createLargeMessageCleanUpHook(configuration)); + } + + @Override + default StreamsCleanUpConfiguration setupCleanUp( + final EffectiveAppConfiguration configuration) { + final StreamsCleanUpConfiguration cleanUpConfiguration = StreamsApp.super.setupCleanUp(configuration); + return registerLargeMessageCleanUpHook(cleanUpConfiguration, configuration); + } + +} diff --git a/streams-bootstrap-test/build.gradle.kts b/streams-bootstrap-test/build.gradle.kts index 43684367..99a47667 100644 --- a/streams-bootstrap-test/build.gradle.kts +++ b/streams-bootstrap-test/build.gradle.kts @@ -1,7 +1,7 @@ description = "Utils for testing your Kafka Streams Application" dependencies { - api(project(":streams-bootstrap")) + api(project(":streams-bootstrap-core")) val fluentKafkaVersion: String by project api( group = "com.bakdata.fluent-kafka-streams-tests", diff --git a/streams-bootstrap-test/src/main/java/com/bakdata/kafka/StreamsBootstrapTopologyFactory.java b/streams-bootstrap-test/src/main/java/com/bakdata/kafka/StreamsBootstrapTopologyFactory.java index 54784e62..f4a2402e 100644 --- a/streams-bootstrap-test/src/main/java/com/bakdata/kafka/StreamsBootstrapTopologyFactory.java +++ b/streams-bootstrap-test/src/main/java/com/bakdata/kafka/StreamsBootstrapTopologyFactory.java @@ -1,7 +1,7 @@ /* * MIT License * - * Copyright (c) 2023 bakdata + * Copyright (c) 2024 bakdata * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -26,92 +26,123 @@ import com.bakdata.fluent_kafka_streams_tests.TestTopology; import com.bakdata.fluent_kafka_streams_tests.junit5.TestTopologyExtension; +import com.bakdata.kafka.KafkaEndpointConfig.KafkaEndpointConfigBuilder; import java.util.Map; import java.util.function.Function; import lombok.experimental.UtilityClass; /** - * Utility class that provides helpers for using Fluent Kafka Streams Tests with {@link KafkaStreamsApplication} + * Utility class that provides helpers for using Fluent Kafka Streams Tests with {@link ConfiguredStreamsApp} */ @UtilityClass public class StreamsBootstrapTopologyFactory { /** - * Create a {@code TestTopology} from a {@code KafkaStreamsApplication}. This also sets - * {@link KafkaStreamsApplication#schemaRegistryUrl}. + * Create a {@code TestTopology} from a {@code ConfiguredStreamsApp}. It injects are {@link KafkaEndpointConfig} + * with configured Schema Registry. 
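As an aside on the `LargeMessageStreamsApp` interface introduced just above: its default `setupCleanUp` registers the large-message topic hook on top of the regular `StreamsApp` cleanup. Below is a minimal, hedged sketch of an app opting in; the class name and topology are hypothetical, generic type parameters are omitted as elsewhere in this diff, and the Kafka config is assumed to carry the usual `AbstractLargeMessageConfig` properties.

```java
import com.bakdata.kafka.LargeMessageStreamsApp;
import com.bakdata.kafka.StreamsTopicConfig;
import com.bakdata.kafka.TopologyBuilder;
import org.apache.kafka.streams.kstream.KStream;

public class LargeMessageMirror implements LargeMessageStreamsApp {
    @Override
    public void buildTopology(final TopologyBuilder builder) {
        // plain mirror topology; LargeMessage files of deleted topics are removed during clean up
        // because setupCleanUp() is inherited from LargeMessageStreamsApp
        final KStream input = builder.streamInput();
        input.to(builder.getTopics().getOutputTopic());
    }

    @Override
    public String getUniqueAppId(final StreamsTopicConfig topics) {
        return "large-message-mirror-" + topics.getOutputTopic();
    }
}
```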
* - * @param app KafkaStreamsApplication to create TestTopology from + * @param app ConfiguredStreamsApp to create TestTopology from * @param Default type of keys * @param Default type of values - * @return {@code TestTopology} that uses topology and configuration provided by {@code KafkaStreamsApplication} - * @see KafkaStreamsApplication#getKafkaProperties() - * @see KafkaStreamsApplication#createTopology() + * @return {@code TestTopology} that uses topology and configuration provided by {@code ConfiguredStreamsApp} + * @see ConfiguredStreamsApp#getKafkaProperties(KafkaEndpointConfig) + * @see ConfiguredStreamsApp#createTopology(Map) */ - public static TestTopology createTopologyWithSchemaRegistry(final KafkaStreamsApplication app) { + public static TestTopology createTopologyWithSchemaRegistry( + final ConfiguredStreamsApp app) { return new TestTopology<>(app::createTopology, getKafkaPropertiesWithSchemaRegistryUrl(app)); } /** - * Create a {@code TestTopologyExtension} from a {@code KafkaStreamsApplication}. This also sets - * {@link KafkaStreamsApplication#schemaRegistryUrl}. + * Create a {@code TestTopologyExtension} from a {@code ConfiguredStreamsApp}. It injects are + * {@link KafkaEndpointConfig} with configured Schema Registry. * - * @param app KafkaStreamsApplication to create TestTopology from + * @param app ConfiguredStreamsApp to create TestTopology from * @param Default type of keys * @param Default type of values - * @return {@code TestTopologyExtension} that uses topology and configuration provided by - * {@code KafkaStreamsApplication} - * @see KafkaStreamsApplication#getKafkaProperties() - * @see KafkaStreamsApplication#createTopology() + * @return {@code TestTopologyExtension} that uses topology and configuration provided by {@code + * ConfiguredStreamsApp} + * @see ConfiguredStreamsApp#getKafkaProperties(KafkaEndpointConfig) + * @see ConfiguredStreamsApp#createTopology(Map) */ public static TestTopologyExtension createTopologyExtensionWithSchemaRegistry( - final KafkaStreamsApplication app) { + final ConfiguredStreamsApp app) { return new TestTopologyExtension<>(app::createTopology, getKafkaPropertiesWithSchemaRegistryUrl(app)); } /** - * Create a {@code TestTopology} from a {@code KafkaStreamsApplication}. This does not set - * {@link KafkaStreamsApplication#schemaRegistryUrl}. + * Create a {@code TestTopology} from a {@code ConfiguredStreamsApp}. It injects are {@link KafkaEndpointConfig} + * without configured Schema Registry. * - * @param app KafkaStreamsApplication to create TestTopology from + * @param app ConfiguredStreamsApp to create TestTopology from * @param Default type of keys * @param Default type of values - * @return {@code TestTopology} that uses topology and configuration provided by {@code KafkaStreamsApplication} - * @see KafkaStreamsApplication#getKafkaProperties() - * @see KafkaStreamsApplication#createTopology() + * @return {@code TestTopology} that uses topology and configuration provided by {@code ConfiguredStreamsApp} + * @see ConfiguredStreamsApp#getKafkaProperties(KafkaEndpointConfig) + * @see ConfiguredStreamsApp#createTopology(Map) */ - public static TestTopology createTopology(final KafkaStreamsApplication app) { - return new TestTopology<>(app::createTopology, app.getKafkaProperties()); + public static TestTopology createTopology(final ConfiguredStreamsApp app) { + return new TestTopology<>(app::createTopology, getKafkaProperties(app)); } /** - * Create a {@code TestTopologyExtension} from a {@code KafkaStreamsApplication}. 
This does not set - * {@link KafkaStreamsApplication#schemaRegistryUrl}. + * Create a {@code TestTopologyExtension} from a {@code ConfiguredStreamsApp}. It injects are + * {@link KafkaEndpointConfig} without configured Schema Registry. * - * @param app KafkaStreamsApplication to create TestTopology from + * @param app ConfiguredStreamsApp to create TestTopology from * @param Default type of keys * @param Default type of values * @return {@code TestTopologyExtension} that uses topology and configuration provided by - * {@code KafkaStreamsApplication} - * @see KafkaStreamsApplication#getKafkaProperties() - * @see KafkaStreamsApplication#createTopology() + * {@code ConfiguredStreamsApp} + * @see ConfiguredStreamsApp#getKafkaProperties(KafkaEndpointConfig) + * @see ConfiguredStreamsApp#createTopology(Map) */ - public static TestTopologyExtension createTopologyExtension(final KafkaStreamsApplication app) { - return new TestTopologyExtension<>(app::createTopology, app.getKafkaProperties()); + public static TestTopologyExtension createTopologyExtension( + final ConfiguredStreamsApp app) { + return new TestTopologyExtension<>(app::createTopology, getKafkaProperties(app)); } /** - * Get Kafka properties from a {@code KafkaStreamsApplication} after configuring - * {@link KafkaStreamsApplication#schemaRegistryUrl}. + * Get Kafka properties from a {@code ConfiguredStreamsApp} after using a {@link KafkaEndpointConfig} with + * configured Schema Registry. * - * @param app KafkaStreamsApplication to get Kafka properties of + * @param app ConfiguredStreamsApp to get Kafka properties of * @return Kafka properties + * @see ConfiguredStreamsApp#getKafkaProperties(KafkaEndpointConfig) */ - public static Function> getKafkaPropertiesWithSchemaRegistryUrl( - final KafkaApplication app) { + public static Function> getKafkaPropertiesWithSchemaRegistryUrl( + final ConfiguredStreamsApp app) { return schemaRegistryUrl -> { - app.setSchemaRegistryUrl(schemaRegistryUrl); - return app.getKafkaProperties(); + final KafkaEndpointConfig endpointConfig = newEndpointConfig() + .schemaRegistryUrl(schemaRegistryUrl) + .build(); + return app.getKafkaProperties(endpointConfig); }; } + /** + * Create {@code Configurator} to configure {@link org.apache.kafka.common.serialization.Serde} and + * {@link org.apache.kafka.common.serialization.Serializer} using the {@code TestTopology} properties. 
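Putting the factory methods above together, a test can derive a `TestTopology` directly from a `ConfiguredStreamsApp`. A hedged sketch reusing the `WordCount` test application from this diff; topic names are illustrative and generic type parameters are omitted as elsewhere in this diff.

```java
import com.bakdata.fluent_kafka_streams_tests.TestTopology;
import com.bakdata.kafka.AppConfiguration;
import com.bakdata.kafka.ConfiguredStreamsApp;
import com.bakdata.kafka.StreamsBootstrapTopologyFactory;
import com.bakdata.kafka.StreamsTopicConfig;
import com.bakdata.kafka.test_applications.WordCount;
import java.util.List;

class WordCountTopologyFactoryExample {
    static TestTopology createTestTopology() {
        final StreamsTopicConfig topics = StreamsTopicConfig.builder()
                .inputTopics(List.of("text-input")) // illustrative topic names
                .outputTopic("word-counts")
                .build();
        final ConfiguredStreamsApp configuredApp =
                new ConfiguredStreamsApp<>(new WordCount(), new AppConfiguration<>(topics));
        // injects a KafkaEndpointConfig whose Schema Registry URL comes from the mock registry
        return StreamsBootstrapTopologyFactory.createTopologyWithSchemaRegistry(configuredApp);
    }
}
```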
+ * @param testTopology {@code TestTopology} to use properties of + * @return {@code Configurator} + * @see TestTopology#getProperties() + */ + public static Configurator createConfigurator(final TestTopology testTopology) { + return new Configurator(testTopology.getProperties()); + } + + private static Map getKafkaProperties(final ConfiguredStreamsApp app) { + final KafkaEndpointConfig endpointConfig = createEndpointConfig(); + return app.getKafkaProperties(endpointConfig); + } + + private static KafkaEndpointConfig createEndpointConfig() { + return newEndpointConfig() + .build(); + } + + private static KafkaEndpointConfigBuilder newEndpointConfig() { + return KafkaEndpointConfig.builder() + .brokers("localhost:9092"); + } } diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/CleanUpRunner.java b/streams-bootstrap/src/main/java/com/bakdata/kafka/CleanUpRunner.java deleted file mode 100644 index 49893251..00000000 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/CleanUpRunner.java +++ /dev/null @@ -1,215 +0,0 @@ -/* - * MIT License - * - * Copyright (c) 2024 bakdata - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
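The `createConfigurator` helper shown above pairs with the `Configurator` usage in `MirrorWithNonDefaultSerde` earlier in this diff. A brief, hedged fragment, assuming a `testTopology` such as the one built in the previous sketch and the usual imports:

```java
// configure a value Serde with the TestTopology's properties (e.g. the Schema Registry URL)
final Configurator configurator = StreamsBootstrapTopologyFactory.createConfigurator(testTopology);
final Serde valueSerde = configurator.configureForValues(new SpecificAvroSerde<>());
```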
- */ - -package com.bakdata.kafka; - -import static com.bakdata.kafka.KafkaApplication.RESET_SLEEP_MS; - -import com.bakdata.kafka.util.ConsumerGroupClient; -import com.bakdata.kafka.util.ImprovedAdminClient; -import com.bakdata.kafka.util.TopologyInformation; -import com.google.common.collect.ImmutableList; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import lombok.Builder; -import lombok.Getter; -import lombok.NonNull; -import lombok.extern.slf4j.Slf4j; -import org.apache.kafka.streams.KafkaStreams; -import org.apache.kafka.streams.Topology; -import org.apache.kafka.tools.StreamsResetter; - - -/** - * Clean up the state and artifacts of your Kafka Streams app - */ -@Slf4j -public final class CleanUpRunner { - private static final int EXIT_CODE_SUCCESS = 0; - private final String appId; - private final KafkaStreams streams; - private final TopologyInformation topologyInformation; - @Getter - private final @NonNull ImprovedAdminClient adminClient; - private final @NonNull Collection> topicCleanUpHooks = new ArrayList<>(); - - - @Builder - private CleanUpRunner(final @NonNull Topology topology, final @NonNull String appId, - final @NonNull ImprovedAdminClient adminClient, final @NonNull KafkaStreams streams) { - this.appId = appId; - this.adminClient = adminClient; - this.streams = streams; - this.topologyInformation = new TopologyInformation(topology, appId); - } - - /** - * Run the Kafka - * Streams Reset Tool - * - * @param inputTopics list of input topics of the streams app - * @param intermediateTopics list of intermediate topics of the streams app - * @param allTopics list of all topics that exists in the Kafka cluster - * @param adminClient admin client to use for resetting the streams app - * @param appId unique app id of the streams app - */ - public static void runResetter(final Collection inputTopics, final Collection intermediateTopics, - final Collection allTopics, final ImprovedAdminClient adminClient, final String appId) { - // StreamsResetter's internal AdminClient can only be configured with a properties file - final File tempFile = createTemporaryPropertiesFile(appId, adminClient.getProperties()); - final ImmutableList.Builder argList = ImmutableList.builder() - .add("--application-id", appId) - .add("--bootstrap-servers", adminClient.getBootstrapServers()) - .add("--config-file", tempFile.toString()); - final Collection existingInputTopics = filterExistingTopics(inputTopics, allTopics); - if (!existingInputTopics.isEmpty()) { - argList.add("--input-topics", String.join(",", existingInputTopics)); - } - final Collection existingIntermediateTopics = filterExistingTopics(intermediateTopics, allTopics); - if (!existingIntermediateTopics.isEmpty()) { - argList.add("--intermediate-topics", String.join(",", existingIntermediateTopics)); - } - final String[] args = argList.build().toArray(String[]::new); - final StreamsResetter resetter = new StreamsResetter(); - final int returnCode = resetter.execute(args); - try { - Files.delete(tempFile.toPath()); - } catch (final IOException e) { - log.warn("Error deleting temporary property file", e); - } - if (returnCode != EXIT_CODE_SUCCESS) { - throw new CleanUpException("Error running streams resetter. 
Exit code " + returnCode); - } - } - - static File createTemporaryPropertiesFile(final String appId, final Map config) { - // Writing properties requires Map - final Properties parsedProperties = toStringBasedProperties(config); - try { - final File tempFile = File.createTempFile(appId + "-reset", "temp"); - try (final FileOutputStream out = new FileOutputStream(tempFile)) { - parsedProperties.store(out, ""); - } - return tempFile; - } catch (final IOException e) { - throw new CleanUpException("Could not run StreamsResetter", e); - } - } - - static Properties toStringBasedProperties(final Map config) { - final Properties parsedProperties = new Properties(); - config.forEach((key, value) -> parsedProperties.setProperty(key.toString(), value.toString())); - return parsedProperties; - } - - private static Collection filterExistingTopics(final Collection topics, - final Collection allTopics) { - return topics.stream() - .filter(topicName -> { - final boolean exists = allTopics.contains(topicName); - if (!exists) { - log.warn("Not resetting missing topic {}", topicName); - } - return exists; - }) - .collect(Collectors.toList()); - } - - /** - * Register a hook that is executed whenever a topic has been deleted by the cleanup runner. - * - * @param cleanUpAction Action to run when a topic requires clean up. Topic is passed as parameter - * @return this for chaining - */ - public CleanUpRunner registerTopicCleanUpHook(final Consumer cleanUpAction) { - this.topicCleanUpHooks.add(cleanUpAction); - return this; - } - - /** - * Clean up your Streams app by resetting the app, deleting local state and optionally deleting the output topics - * and consumer group - * - * @param deleteOutputTopic whether to delete output topics and consumer group - */ - public void run(final boolean deleteOutputTopic) { - final Collection allTopics = this.adminClient.getTopicClient().listTopics(); - final List inputTopics = this.topologyInformation.getExternalSourceTopics(allTopics); - final List intermediateTopics = this.topologyInformation.getIntermediateTopics(allTopics); - runResetter(inputTopics, intermediateTopics, allTopics, this.adminClient, this.appId); - // the StreamsResetter is responsible for deleting internal topics - this.topologyInformation.getInternalTopics() - .forEach(this::resetInternalTopic); - if (deleteOutputTopic) { - this.deleteTopics(); - this.deleteConsumerGroup(); - } - this.streams.cleanUp(); - try { - Thread.sleep(RESET_SLEEP_MS); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - throw new CleanUpException("Error waiting for clean up", e); - } - } - - /** - * Delete output topics - */ - public void deleteTopics() { - final List externalTopics = this.topologyInformation.getExternalSinkTopics(); - externalTopics.forEach(this::deleteTopic); - } - - private void resetInternalTopic(final String topic) { - this.adminClient.getSchemaTopicClient() - .resetSchemaRegistry(topic); - this.runTopicCleanUp(topic); - } - - private void runTopicCleanUp(final String topic) { - this.topicCleanUpHooks.forEach(hook -> hook.accept(topic)); - } - - private void deleteTopic(final String topic) { - this.adminClient.getSchemaTopicClient() - .deleteTopicAndResetSchemaRegistry(topic); - this.runTopicCleanUp(topic); - } - - private void deleteConsumerGroup() { - final ConsumerGroupClient consumerGroupClient = this.adminClient.getConsumerGroupClient(); - consumerGroupClient.deleteGroupIfExists(this.appId); - } - -} diff --git 
a/streams-bootstrap/src/main/java/com/bakdata/kafka/KafkaApplication.java b/streams-bootstrap/src/main/java/com/bakdata/kafka/KafkaApplication.java deleted file mode 100644 index 14daf895..00000000 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/KafkaApplication.java +++ /dev/null @@ -1,186 +0,0 @@ -/* - * MIT License - * - * Copyright (c) 2023 bakdata - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package com.bakdata.kafka; - -import com.bakdata.kafka.util.ImprovedAdminClient; -import com.google.common.base.Preconditions; -import java.time.Duration; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Properties; -import lombok.Getter; -import lombok.RequiredArgsConstructor; -import lombok.Setter; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.core.config.Configurator; -import picocli.CommandLine; - -/** - *

- * The base class of the entry point of the Kafka application.

- * This class provides common configuration options, e.g., {@link #brokers}, for Kafka applications. Hereby it - * automatically populates the passed in command line arguments with matching environment arguments - * {@link EnvironmentArgumentsParser}. To implement your Kafka application inherit from this class and add your custom - * options. - */ -@ToString -@Getter -@Setter -@RequiredArgsConstructor -@Slf4j -public abstract class KafkaApplication implements Runnable { - public static final int RESET_SLEEP_MS = 5000; - public static final Duration ADMIN_TIMEOUT = Duration.ofSeconds(10L); - private static final String ENV_PREFIX = Optional.ofNullable( - System.getenv("ENV_PREFIX")).orElse("APP_"); - /** - * This variable is usually set on application start. When the application is running in debug mode it is used to - * reconfigure the child app package logger. By default, it points to the package of this class allowing to execute - * the run method independently. - */ - protected static String appPackageName = KafkaApplication.class.getPackageName(); - @CommandLine.Option(names = "--output-topic", description = "Output topic") - protected String outputTopic; - @CommandLine.Option(names = "--extra-output-topics", split = ",", description = "Additional named output topics") - protected Map extraOutputTopics = new HashMap<>(); - @CommandLine.Option(names = "--brokers", required = true, description = "Broker addresses to connect to") - protected String brokers = ""; - @CommandLine.Option(names = "--debug", arity = "0..1", description = "Configure logging to debug") - protected boolean debug; - @CommandLine.Option(names = "--clean-up", arity = "0..1", - description = "Clear the state store and the global Kafka offsets for the " - + "consumer group. Be careful with running in production and with enabling this flag - it " - + "might cause inconsistent processing with multiple replicas.") - protected boolean cleanUp; - @CommandLine.Option(names = "--schema-registry-url", description = "URL of Schema Registry") - protected String schemaRegistryUrl; - @CommandLine.Option(names = {"-h", "--help"}, usageHelp = true, description = "print this help and exit") - private boolean helpRequested; - //TODO change to more generic parameter name in the future. Retain old name for backwards compatibility - @CommandLine.Option(names = "--streams-config", split = ",", description = "Additional Kafka properties") - private Map streamsConfig = new HashMap<>(); - - /** - *

-     * This method needs to be called in the executable custom application class inheriting from
-     * {@code KafkaApplication}.
-     *
-     * This method calls {@code System.exit()}.

- * - * @param app An instance of the custom application class. - * @param args Arguments passed in by the custom application class. - * @see #startApplicationWithoutExit(KafkaApplication, String[]) - */ - protected static void startApplication(final KafkaApplication app, final String[] args) { - final int exitCode = startApplicationWithoutExit(app, args); - System.exit(exitCode); - } - - /** - *

-     * This method needs to be called in the executable custom application class inheriting from
-     * {@code KafkaApplication}.

- * - * @param app An instance of the custom application class. - * @param args Arguments passed in by the custom application class. - * @return Exit code of application - */ - protected static int startApplicationWithoutExit(final KafkaApplication app, final String[] args) { - appPackageName = app.getClass().getPackageName(); - final String[] populatedArgs = addEnvironmentVariablesArguments(args); - final CommandLine commandLine = new CommandLine(app); - return commandLine.execute(populatedArgs); - } - - static String[] addEnvironmentVariablesArguments(final String[] args) { - Preconditions.checkArgument(!ENV_PREFIX.equals(EnvironmentStreamsConfigParser.PREFIX), - "Prefix '" + EnvironmentStreamsConfigParser.PREFIX + "' is reserved for Streams config"); - final List environmentArguments = new EnvironmentArgumentsParser(ENV_PREFIX) - .parseVariables(System.getenv()); - final Collection allArgs = new ArrayList<>(environmentArguments); - allArgs.addAll(Arrays.asList(args)); - return allArgs.toArray(String[]::new); - } - - @Override - public void run() { - log.info("Starting application"); - if (this.debug) { - Configurator.setLevel("com.bakdata", Level.DEBUG); - Configurator.setLevel(appPackageName, Level.DEBUG); - } - log.debug(this.toString()); - } - - /** - *

-     * This method specifies the configuration to run your Kafka application with.

- * To add a custom configuration please override {@link #createKafkaProperties()}. Configuration properties - * specified via environment (starting with STREAMS_) or via cli option {@code --streams-config} are always applied - * with highest priority (the latter overrides the former). - * - * @return Returns Kafka configuration {@link Properties} - */ - public final Properties getKafkaProperties() { - final Properties kafkaConfig = this.createKafkaProperties(); - - EnvironmentStreamsConfigParser.parseVariables(System.getenv()) - .forEach(kafkaConfig::setProperty); - this.streamsConfig.forEach(kafkaConfig::setProperty); - - return kafkaConfig; - } - - /** - * Get extra output topic for a specified role - * - * @param role role of output topic specified in CLI argument - * @return topic name - */ - public String getOutputTopic(final String role) { - final String topic = this.extraOutputTopics.get(role); - Preconditions.checkNotNull(topic, "No output topic for role '%s' available", role); - return topic; - } - - /** - * Create an admin client for the configured Kafka cluster - * - * @return admin client - */ - public ImprovedAdminClient createAdminClient() { - return ImprovedAdminClient.builder() - .properties(this.getKafkaProperties()) - .schemaRegistryUrl(this.schemaRegistryUrl) - .timeout(ADMIN_TIMEOUT) - .build(); - } - - protected abstract Properties createKafkaProperties(); - - protected abstract void runCleanUp(); -} diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/KafkaProducerApplication.java b/streams-bootstrap/src/main/java/com/bakdata/kafka/KafkaProducerApplication.java deleted file mode 100644 index 5b7ac6a5..00000000 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/KafkaProducerApplication.java +++ /dev/null @@ -1,162 +0,0 @@ -/* - * MIT License - * - * Copyright (c) 2023 bakdata - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -package com.bakdata.kafka; - -import com.bakdata.kafka.util.ImprovedAdminClient; -import com.bakdata.kafka.util.SchemaTopicClient; -import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; -import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer; -import java.util.Properties; -import java.util.function.Consumer; -import lombok.Getter; -import lombok.RequiredArgsConstructor; -import lombok.Setter; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.common.serialization.StringSerializer; -import org.jooq.lambda.Seq; - - -/** - *

- * The base class of the entry point of a producer application.

- * This class provides common configuration options, e.g., {@link #brokers}, for producer applications. Hereby it - * automatically populates the passed in command line arguments with matching environment arguments - * {@link EnvironmentArgumentsParser}. To implement your producer application inherit from this class and add your - * custom options. Call {@link #startApplication(KafkaApplication, String[])} with a fresh instance of your class from - * your main. - */ -@ToString(callSuper = true) -@Getter -@Setter -@RequiredArgsConstructor -@Slf4j -public abstract class KafkaProducerApplication extends KafkaApplication { - - @Override - public void run() { - super.run(); - - if (this.cleanUp) { - this.runCleanUp(); - } else { - this.runApplication(); - } - } - - protected abstract void runApplication(); - - /** - *

-     * This method should give a default configuration to run your producer application with.

- * If {@link KafkaApplication#schemaRegistryUrl} is set {@link SpecificAvroSerializer} is set as the default key, - * value serializer. Otherwise, {@link StringSerializer} is configured as the default key, value serializer. To add - * a custom configuration, please add a similar method to your custom application class: - *
-     * {@code
-     *   protected Properties createKafkaProperties() {
-     *       // Try to always use the Kafka properties from the super class as the base Map
-     *       Properties kafkaConfig = super.createKafkaProperties();
-     *       kafkaConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, GenericAvroSerializer.class);
-     *       kafkaConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, GenericAvroSerializer.class);
-     *       return kafkaConfig;
-     *   }
-     * }
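A minimal sketch of how the 2.x `KafkaProducerApplication` removed in this hunk was typically subclassed; it follows the `{@code}` example above and the usage exercised by `RunProducerAppTest` further down in this diff. The class name, the record contents, and the serializer override are hypothetical, not part of the library:

```java
// Sketch only: builds against the 2.x API deleted in this hunk.
// ExampleProducerApplication, the record contents, and the serializer override are hypothetical.
import com.bakdata.kafka.KafkaProducerApplication;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class ExampleProducerApplication extends KafkaProducerApplication {
    public static void main(final String[] args) {
        startApplication(new ExampleProducerApplication(), args);
    }

    @Override
    protected void runApplication() {
        // createProducer() uses the merged configuration returned by getKafkaProperties()
        try (final KafkaProducer<String, String> producer = this.createProducer()) {
            producer.send(new ProducerRecord<>(this.getOutputTopic(), "key", "value"));
        }
    }

    @Override
    protected Properties createKafkaProperties() {
        // start from the defaults of the super class and override selectively
        final Properties kafkaProperties = super.createKafkaProperties();
        kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return kafkaProperties;
    }
}
```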
- * - * @return Returns a default Kafka configuration {@link Properties} - */ - @Override - protected Properties createKafkaProperties() { - final Properties kafkaConfig = new Properties(); - - // exactly once and order - kafkaConfig.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1); - kafkaConfig.setProperty(ProducerConfig.ACKS_CONFIG, "all"); - - // compression - kafkaConfig.setProperty(ProducerConfig.COMPRESSION_TYPE_CONFIG, "gzip"); - - this.configureDefaultSerializer(kafkaConfig); - kafkaConfig.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokers); - return kafkaConfig; - } - - protected KafkaProducer createProducer() { - final Properties properties = new Properties(); - properties.putAll(this.getKafkaProperties()); - return new KafkaProducer<>(properties); - } - - /** - * This method deletes all output topics. - */ - @Override - protected void runCleanUp() { - try (final ImprovedAdminClient improvedAdminClient = this.createAdminClient()) { - this.cleanUpRun(improvedAdminClient.getSchemaTopicClient()); - } - } - - protected void cleanUpRun(final SchemaTopicClient schemaTopicClient) { - final Iterable outputTopics = this.getAllOutputTopics(); - final Consumer topicCleanUpHook = this.createTopicCleanUpHook(); - outputTopics.forEach(topic -> { - schemaTopicClient.deleteTopicAndResetSchemaRegistry(topic); - topicCleanUpHook.accept(topic); - }); - - try { - Thread.sleep(RESET_SLEEP_MS); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - throw new CleanUpException("Error waiting for clean up", e); - } - } - - /** - * Create a hook that is executed whenever a topic is deleted in clean up. - * - * @return Action to run when a topic requires clean up. Topic is passed as parameter - */ - protected Consumer createTopicCleanUpHook() { - // do nothing by default - return topic -> {}; - } - - private void configureDefaultSerializer(final Properties kafkaConfig) { - if (this.schemaRegistryUrl == null) { - kafkaConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); - kafkaConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); - } else { - kafkaConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, SpecificAvroSerializer.class); - kafkaConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, SpecificAvroSerializer.class); - kafkaConfig.setProperty(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, this.schemaRegistryUrl); - } - } - - private Iterable getAllOutputTopics() { - return Seq.of(this.outputTopic) - .concat(this.extraOutputTopics.values()); - } -} diff --git a/streams-bootstrap/src/main/java/com/bakdata/kafka/KafkaStreamsApplication.java b/streams-bootstrap/src/main/java/com/bakdata/kafka/KafkaStreamsApplication.java deleted file mode 100644 index 33427103..00000000 --- a/streams-bootstrap/src/main/java/com/bakdata/kafka/KafkaStreamsApplication.java +++ /dev/null @@ -1,406 +0,0 @@ -/* - * MIT License - * - * Copyright (c) 2023 bakdata - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * 
copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package com.bakdata.kafka; - -import com.bakdata.kafka.util.ImprovedAdminClient; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; -import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.CountDownLatch; -import java.util.regex.Pattern; -import lombok.Getter; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.Setter; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.common.serialization.Serdes.StringSerde; -import org.apache.kafka.streams.KafkaStreams; -import org.apache.kafka.streams.KafkaStreams.CloseOptions; -import org.apache.kafka.streams.KafkaStreams.State; -import org.apache.kafka.streams.KafkaStreams.StateListener; -import org.apache.kafka.streams.StreamsBuilder; -import org.apache.kafka.streams.StreamsConfig; -import org.apache.kafka.streams.Topology; -import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; -import picocli.CommandLine; -import picocli.CommandLine.UseDefaultConverter; - - -/** - *

- * The base class of the entry point of the streaming application.

- * This class provides common configuration options e.g. {@link #brokers}, {@link #productive} for streaming - * application. Hereby it automatically populates the passed in command line arguments with matching environment - * arguments {@link EnvironmentArgumentsParser}. To implement your streaming application inherit from this class and add - * your custom options. Call {@link #startApplication(KafkaApplication, String[])} with a fresh instance of your class - * from your main. - */ -@ToString(callSuper = true) -@Getter -@Setter -@RequiredArgsConstructor -@Slf4j -public abstract class KafkaStreamsApplication extends KafkaApplication implements AutoCloseable { - private static final int DEFAULT_PRODUCTIVE_REPLICATION_FACTOR = 3; - private final CountDownLatch streamsShutdown = new CountDownLatch(1); - @CommandLine.Option(names = "--input-topics", description = "Input topics", split = ",") - protected List inputTopics = new ArrayList<>(); - @CommandLine.Option(names = "--input-pattern", description = "Input pattern") - protected Pattern inputPattern; - @CommandLine.Option(names = "--error-topic", description = "Error topic") - protected String errorTopic; - @CommandLine.Option(names = "--extra-input-topics", split = ",", description = "Additional named input topics", - converter = {UseDefaultConverter.class, StringListConverter.class}) - protected Map> extraInputTopics = new HashMap<>(); - @CommandLine.Option(names = "--extra-input-patterns", split = ",", description = "Additional named input patterns") - protected Map extraInputPatterns = new HashMap<>(); - @CommandLine.Option(names = "--productive", arity = "1", - description = "Whether to use Kafka Streams configuration values, such as replication.factor=3, that are " - + "more suitable for production environments") - private boolean productive = true; - @CommandLine.Option(names = "--delete-output", arity = "0..1", - description = "Delete the output topic during the clean up.") - private boolean deleteOutputTopic; - @CommandLine.Option(names = "--volatile-group-instance-id", arity = "0..1", - description = "Whether the group instance id is volatile, i.e., it will change on a Streams shutdown.") - private boolean volatileGroupInstanceId; - private KafkaStreams streams; - private Throwable lastException; - - private static boolean isError(final State newState) { - return newState == State.ERROR; - } - - /** - * Run the application. If Kafka Streams is run, this method blocks until Kafka Streams has completed shutdown, - * either because it caught an error or the application has received a shutdown event. 
- */ - @Override - public void run() { - super.run(); - - try { - final Properties kafkaProperties = this.getKafkaProperties(); - this.streams = new KafkaStreams(this.createTopology(), kafkaProperties); - final StreamsUncaughtExceptionHandler uncaughtExceptionHandler = this.getUncaughtExceptionHandler(); - this.streams.setUncaughtExceptionHandler( - new CapturingStreamsUncaughtExceptionHandler(uncaughtExceptionHandler)); - final StateListener stateListener = this.getStateListener(); - this.streams.setStateListener(new ClosingResourcesStateListener(stateListener)); - - if (this.cleanUp) { - this.runCleanUp(); - } else { - this.runAndAwaitStreamsApplications(); - } - } catch (final Throwable e) { - this.closeResources(); - throw e; - } - if (isError(this.streams.state())) { - // let PicoCLI exit with an error code - if (this.lastException instanceof RuntimeException) { - throw (RuntimeException) this.lastException; - } - throw new StreamsApplicationException("Kafka Streams has transitioned to error", this.lastException); - } - } - - @Override - public void close() { - log.info("Stopping application"); - if (this.streams != null) { - final boolean staticMembershipDisabled = this.isStaticMembershipDisabled(); - final boolean leaveGroup = staticMembershipDisabled || this.volatileGroupInstanceId; - this.closeStreams(leaveGroup); - } - // close resources after streams because messages currently processed might depend on resources - this.closeResources(); - } - - /** - * Build the Kafka Streams topology to be run by the app - * - * @param builder builder to use for building the topology - */ - public abstract void buildTopology(StreamsBuilder builder); - - /** - * This must be set to a unique value for every application interacting with your kafka cluster to ensure internal - * state encapsulation. Could be set to: className-inputTopic-outputTopic - */ - public abstract String getUniqueAppId(); - - /** - * Create the topology of the Kafka Streams app - * - * @return topology of the Kafka Streams app - */ - public Topology createTopology() { - final StreamsBuilder builder = new StreamsBuilder(); - this.buildTopology(builder); - return builder.build(); - } - - /** - * Get first input topic. 
- * - * @return topic name - * @deprecated Use {@link #getInputTopics()} - */ - @Deprecated(since = "2.1.0") - public String getInputTopic() { - if (this.getInputTopics().isEmpty() || this.getInputTopics().get(0).isBlank()) { - throw new IllegalArgumentException("One input topic required"); - } - return this.getInputTopics().get(0); - } - - /** - * Get extra input topic for a specified role - * - * @param role role of input topic specified in CLI argument - * @return topic name - * @deprecated Use {@link #getInputTopics(String)} - */ - @Deprecated(since = "2.4.0") - public String getInputTopic(final String role) { - final List topic = this.extraInputTopics.get(role); - Preconditions.checkNotNull(topic, "No input topic for role '%s' available", role); - Preconditions.checkArgument(!topic.isEmpty(), "No input topic for role '%s' available", role); - return topic.get(0); - } - - /** - * Get extra input topics for a specified role - * - * @param role role of input topics specified in CLI argument - * @return topic names - */ - public List getInputTopics(final String role) { - final List topics = this.extraInputTopics.get(role); - Preconditions.checkNotNull(topics, "No input topics for role '%s' available", role); - return topics; - } - - /** - * Get extra input pattern for a specified role - * - * @param role role of input pattern specified in CLI argument - * @return topic pattern - */ - public Pattern getInputPattern(final String role) { - final Pattern pattern = this.extraInputPatterns.get(role); - Preconditions.checkNotNull(pattern, "No input pattern for role '%s' available", role); - return pattern; - } - - /** - * Create a {@link StreamsUncaughtExceptionHandler} to use for Kafka Streams. - * - * @return {@code StreamsUncaughtExceptionHandler}. - * @see KafkaStreams#setUncaughtExceptionHandler(StreamsUncaughtExceptionHandler) - */ - protected StreamsUncaughtExceptionHandler getUncaughtExceptionHandler() { - return new DefaultStreamsUncaughtExceptionHandler(); - } - - /** - *

-     * This method should give a default configuration to run your streaming application with.

- * If {@link KafkaApplication#schemaRegistryUrl} is set {@link SpecificAvroSerde} is set as the default key, value - * serde. Otherwise, the {@link StringSerde} is configured as the default key, value serde. To add a custom - * configuration please add a similar method to your custom application class: - *
-     * {@code
-     *   protected Properties createKafkaProperties() {
-     *       // Try to always use the Kafka properties from the super class as the base Map
-     *       Properties kafkaConfig = super.createKafkaProperties();
-     *       kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, GenericAvroSerde.class);
-     *       kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, GenericAvroSerde.class);
-     *       return kafkaConfig;
-     *   }
-     * }
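The precedence described in this Javadoc — defaults from `createKafkaProperties()`, overridden by `STREAMS_`-prefixed environment variables, overridden in turn by `--streams-config` — is what `PropertiesTest` further down in this diff asserts. A small sketch of that layering against the deleted 2.x classes; the `foo`/`bar`/`baz` keys are made up, and the class is placed in `com.bakdata.kafka` only so it can reach the protected `startApplicationWithoutExit`, as the tests do:

```java
// Sketch only: shows the configuration precedence of the 2.x API deleted in this hunk.
// Placed in com.bakdata.kafka (like PropertiesTest) to access startApplicationWithoutExit.
package com.bakdata.kafka;

import java.util.Properties;
import org.apache.kafka.streams.StreamsBuilder;

public class PrecedenceExample {
    public static void main(final String[] args) {
        final KafkaStreamsApplication app = new KafkaStreamsApplication() {
            @Override
            public void buildTopology(final StreamsBuilder builder) {
                throw new UnsupportedOperationException();
            }

            @Override
            public String getUniqueAppId() {
                return "precedence-example";
            }

            @Override
            public void run() {
                // do not start Kafka Streams; only the merged configuration is of interest here
            }

            @Override
            protected Properties createKafkaProperties() {
                final Properties properties = super.createKafkaProperties();
                properties.setProperty("foo", "bar"); // lowest priority: application default
                return properties;
            }
        };
        KafkaApplication.startApplicationWithoutExit(app, new String[]{
                "--brokers", "localhost:9092",
                "--input-topics", "input",
                "--output-topic", "output",
                "--streams-config", "foo=baz" // highest priority, overrides the default above
        });
        // STREAMS_-prefixed environment variables are applied between these two layers.
        System.out.println(app.getKafkaProperties().getProperty("foo")); // prints "baz"
    }
}
```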
- * - * @return Returns a default Kafka Streams configuration {@link Properties} - */ - @Override - protected Properties createKafkaProperties() { - final Properties kafkaConfig = new Properties(); - - // exactly once and order - kafkaConfig.setProperty(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE_V2); - kafkaConfig.put(StreamsConfig.producerPrefix(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION), 1); - - // resilience - if (this.productive) { - kafkaConfig.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, DEFAULT_PRODUCTIVE_REPLICATION_FACTOR); - } - - kafkaConfig.setProperty(StreamsConfig.producerPrefix(ProducerConfig.ACKS_CONFIG), "all"); - - // compression - kafkaConfig.setProperty(StreamsConfig.producerPrefix(ProducerConfig.COMPRESSION_TYPE_CONFIG), "gzip"); - - // topology - kafkaConfig.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, this.getUniqueAppId()); - - this.configureDefaultSerde(kafkaConfig); - kafkaConfig.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, this.getBrokers()); - return kafkaConfig; - } - - /** - * Run the Streams application. This method blocks until Kafka Streams has completed shutdown, either because it - * caught an error or the application has received a shutdown event. - */ - protected void runAndAwaitStreamsApplications() { - this.runStreamsApplication(); - this.awaitStreamsShutdown(); - } - - /** - * Start Kafka Streams and register a ShutdownHook for closing Kafka Streams. - */ - protected void runStreamsApplication() { - this.streams.start(); - Runtime.getRuntime().addShutdownHook(new Thread(this::close)); - } - - /** - * Method to close resources outside of {@link KafkaStreams}. Will be called by default on {@link #close()} and on - * transitioning to {@link State#ERROR}. - */ - protected void closeResources() { - //do nothing by default - } - - /** - * Create a {@link StateListener} to use for Kafka Streams. - * - * @return {@code StateListener}. - * @see KafkaStreams#setStateListener(StateListener) - */ - protected StateListener getStateListener() { - return new NoOpStateListener(); - } - - /** - * This method resets the offset for all input topics and deletes internal topics, application state, and optionally - * the output and error topic. - */ - @Override - protected void runCleanUp() { - try (final ImprovedAdminClient adminClient = this.createAdminClient()) { - final CleanUpRunner cleanUpRunner = CleanUpRunner.builder() - .topology(this.createTopology()) - .appId(this.getUniqueAppId()) - .adminClient(adminClient) - .streams(this.streams) - .build(); - - this.cleanUpRun(cleanUpRunner); - } - this.close(); - } - - protected void cleanUpRun(final CleanUpRunner cleanUpRunner) { - cleanUpRunner.run(this.deleteOutputTopic); - } - - /** - * Wait for Kafka Streams to shut down. Shutdown is detected by a {@link StateListener}. 
- * - * @see State#hasCompletedShutdown() - */ - protected void awaitStreamsShutdown() { - try { - this.streamsShutdown.await(); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - throw new StreamsApplicationException("Error awaiting Streams shutdown", e); - } - } - - @VisibleForTesting - void closeStreams(final boolean leaveGroup) { - final CloseOptions options = new CloseOptions().leaveGroup(leaveGroup); - log.debug("Closing Kafka Streams with leaveGroup={}", leaveGroup); - this.streams.close(options); - } - - private void configureDefaultSerde(final Properties kafkaConfig) { - if (this.schemaRegistryUrl == null) { - kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); - kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); - } else { - kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, SpecificAvroSerde.class); - kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class); - kafkaConfig.setProperty(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, this.schemaRegistryUrl); - } - } - - private boolean isStaticMembershipDisabled() { - final Properties kafkaProperties = this.getKafkaProperties(); - return kafkaProperties.getProperty(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG) == null; - } - - @RequiredArgsConstructor - private class CapturingStreamsUncaughtExceptionHandler implements StreamsUncaughtExceptionHandler { - - private @NonNull StreamsUncaughtExceptionHandler wrapped; - - @Override - public StreamThreadExceptionResponse handle(final Throwable exception) { - final StreamThreadExceptionResponse response = this.wrapped.handle(exception); - KafkaStreamsApplication.this.lastException = exception; - return response; - } - } - - @RequiredArgsConstructor - private class ClosingResourcesStateListener implements StateListener { - - private @NonNull StateListener wrapped; - - @Override - public void onChange(final State newState, final State oldState) { - this.wrapped.onChange(newState, oldState); - if (isError(newState)) { - log.debug("Closing resources because of state transition from {} to {}", oldState, newState); - KafkaStreamsApplication.this.closeResources(); - } - if (newState.hasCompletedShutdown()) { - KafkaStreamsApplication.this.streamsShutdown.countDown(); - } - } - } -} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/PropertiesTest.java b/streams-bootstrap/src/test/java/com/bakdata/kafka/PropertiesTest.java deleted file mode 100644 index 854d799d..00000000 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/PropertiesTest.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * MIT License - * - * Copyright (c) 2023 bakdata - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package com.bakdata.kafka; - -import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.streams.StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG; -import static org.apache.kafka.streams.StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG; -import static org.assertj.core.api.Assertions.assertThat; - -import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; -import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer; -import java.util.Properties; -import org.apache.kafka.common.serialization.Serdes.StringSerde; -import org.apache.kafka.common.serialization.StringSerializer; -import org.apache.kafka.streams.StreamsBuilder; -import org.junit.jupiter.api.Test; - -class PropertiesTest { - - @Test - void shouldPrioritizeConfigCLIParameters() { - final TestApplication app = new TestApplication(); - KafkaApplication.startApplicationWithoutExit(app, - new String[]{ - "--brokers", "fake", - "--schema-registry-url", "fake", - "--output-topic", "output", - "--input-topics", "input", - "--error-topic", "error-topic", - "--streams-config", "foo=baz", - "--streams-config", "kafka=streams" - }); - assertThat(app.getKafkaProperties()) - .containsEntry("foo", "baz") - .containsEntry("kafka", "streams") - .containsEntry("hello", "world"); - } - - @Test - void shouldSetDefaultAvroSerdeWhenSchemaRegistryUrlIsSet() { - final TestApplication app = new TestApplication(); - KafkaApplication.startApplicationWithoutExit(app, - new String[]{ - "--brokers", "fake", - "--schema-registry-url", "fake", - "--output-topic", "output", - "--input-topics", "input", - "--error-topic", "error-topic" - }); - assertThat(app.getKafkaProperties()) - .containsEntry(DEFAULT_KEY_SERDE_CLASS_CONFIG, SpecificAvroSerde.class) - .containsEntry(DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class) - .containsEntry(SCHEMA_REGISTRY_URL_CONFIG, "fake"); - } - - @Test - void shouldSetDefaultStringSerdeWhenSchemaRegistryUrlIsNotSet() { - final TestApplication app = new TestApplication(); - KafkaApplication.startApplicationWithoutExit(app, - new String[]{ - "--brokers", "fake", - "--output-topic", "output", - "--input-topics", "input", - "--error-topic", "error-topic" - }); - assertThat(app.getKafkaProperties()) - .containsEntry(DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class) - .containsEntry(DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); - } - - @Test - void shouldSetDefaultAvroSerializerWhenSchemaRegistryUrlIsSet() { - final TestProducer app = new TestProducer(); - KafkaApplication.startApplicationWithoutExit(app, - new String[]{ - "--brokers", "fake", - "--schema-registry-url", "fake", - "--output-topic", "output", - "--input-topics", "input", - "--error-topic", "error-topic" - }); - assertThat(app.getKafkaProperties()) - .containsEntry(KEY_SERIALIZER_CLASS_CONFIG, SpecificAvroSerializer.class) - .containsEntry(VALUE_SERIALIZER_CLASS_CONFIG, SpecificAvroSerializer.class); - } - - @Test - void shouldSetDefaultStringSerializerWhenSchemaRegistryUrlIsNotSet() { - final TestProducer app 
= new TestProducer(); - KafkaApplication.startApplicationWithoutExit(app, - new String[]{ - "--brokers", "fake", - "--output-topic", "output", - "--input-topics", "input", - "--error-topic", "error-topic" - }); - assertThat(app.getKafkaProperties()) - .containsEntry(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) - .containsEntry(VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); - } - - private static class TestApplication extends KafkaStreamsApplication { - - @Override - public void buildTopology(final StreamsBuilder builder) { - throw new UnsupportedOperationException(); - } - - @Override - public void run() { - // do nothing - } - - @Override - public String getUniqueAppId() { - return "foo"; - } - - @Override - protected Properties createKafkaProperties() { - final Properties properties = super.createKafkaProperties(); - properties.setProperty("foo", "bar"); - properties.setProperty("hello", "world"); - return properties; - } - } - - private static class TestProducer extends KafkaProducerApplication { - - @Override - protected void runApplication() { - // do noting - } - } -} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/WordCountTest.java b/streams-bootstrap/src/test/java/com/bakdata/kafka/WordCountTest.java deleted file mode 100644 index ff0b6d0d..00000000 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/WordCountTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * MIT License - * - * Copyright (c) 2023 bakdata - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -package com.bakdata.kafka; - -import static org.apache.kafka.streams.StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG; -import static org.apache.kafka.streams.StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG; - -import com.bakdata.fluent_kafka_streams_tests.junit5.TestTopologyExtension; -import com.bakdata.kafka.test_applications.WordCount; -import org.apache.kafka.common.serialization.Serdes; -import org.apache.kafka.common.serialization.Serdes.StringSerde; -import org.assertj.core.api.SoftAssertions; -import org.assertj.core.api.junit.jupiter.InjectSoftAssertions; -import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.jupiter.api.extension.RegisterExtension; -import picocli.CommandLine; - -@ExtendWith(SoftAssertionsExtension.class) -class WordCountTest { - private static final String[] ARGS = { - "--input-topics", "Input,Input2", - "--output-topic", "Output", - "--brokers", "localhost:9092", - "--streams-config", "test.ack=1,test1.ack=2" - }; - private final WordCount app = CommandLine.populateCommand(new WordCount(), ARGS); - @RegisterExtension - final TestTopologyExtension testTopology = - StreamsBootstrapTopologyFactory.createTopologyExtension(this.app); - @InjectSoftAssertions - private SoftAssertions softly; - - @Test - void shouldAggregateSameWordStream() { - this.testTopology.input("Input") - .add("bla") - .add("blub") - .add("bla"); - - this.testTopology.streamOutput().withValueSerde(Serdes.Long()) - .expectNextRecord().hasKey("bla").hasValue(1L) - .expectNextRecord().hasKey("blub").hasValue(1L) - .expectNextRecord().hasKey("bla").hasValue(2L) - .expectNoMoreRecord(); - } - - @Test - void shouldSetKafkaProperties() { - this.softly.assertThat(this.app.getKafkaProperties()).containsEntry("test.ack", "1"); - this.softly.assertThat(this.app.getKafkaProperties()).containsEntry("test1.ack", "2"); - } - - @Test - void shouldSetDefaultSerdeWhenSchemaRegistryUrlIsNotSet() { - this.softly.assertThat(this.app.getKafkaProperties()) - .containsEntry(DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); - this.softly.assertThat(this.app.getKafkaProperties()) - .containsEntry(DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); - } - - @Test - void shouldParseMultipleInputTopics() { - this.softly.assertThat(this.app.getInputTopics()) - .containsExactly("Input", "Input2"); - } -} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/integration/RunProducerAppTest.java b/streams-bootstrap/src/test/java/com/bakdata/kafka/integration/RunProducerAppTest.java deleted file mode 100644 index 11db8b72..00000000 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/integration/RunProducerAppTest.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * MIT License - * - * Copyright (c) 2023 bakdata - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package com.bakdata.kafka.integration; - -import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith; -import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig; -import static net.mguenther.kafka.junit.Wait.delay; -import static org.assertj.core.api.Assertions.assertThat; - -import com.bakdata.kafka.KafkaProducerApplication; -import com.bakdata.kafka.TestRecord; -import com.bakdata.schemaregistrymock.junit5.SchemaRegistryMockExtension; -import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; -import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; -import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; -import io.confluent.kafka.streams.serdes.avro.SpecificAvroDeserializer; -import java.io.IOException; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.TimeUnit; -import net.mguenther.kafka.junit.EmbeddedKafkaCluster; -import net.mguenther.kafka.junit.ReadKeyValues; -import net.mguenther.kafka.junit.TopicConfig; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.apache.kafka.common.serialization.StringSerializer; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -class RunProducerAppTest { - private static final int TIMEOUT_SECONDS = 10; - @RegisterExtension - final SchemaRegistryMockExtension schemaRegistryMockExtension = new SchemaRegistryMockExtension(); - private final EmbeddedKafkaCluster kafkaCluster = provisionWith(defaultClusterConfig()); - - @BeforeEach - void setup() { - this.kafkaCluster.start(); - } - - @AfterEach - void teardown() { - this.kafkaCluster.stop(); - } - - @Test - void shouldRunApp() throws InterruptedException, IOException, RestClientException { - final String output = "output"; - this.kafkaCluster.createTopic(TopicConfig.withName(output).useDefaults()); - final KafkaProducerApplication app = new KafkaProducerApplication() { - @Override - protected void runApplication() { - try (final KafkaProducer producer = this.createProducer()) { - final TestRecord record = TestRecord.newBuilder().setContent("bar").build(); - producer.send(new ProducerRecord<>(this.getOutputTopic(), "foo", record)); - } - } - - @Override - protected Properties createKafkaProperties() { - final Properties kafkaProperties = super.createKafkaProperties(); - kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); - return kafkaProperties; - } - }; - app.setBrokers(this.kafkaCluster.getBrokerList()); - app.setSchemaRegistryUrl(this.schemaRegistryMockExtension.getUrl()); - app.setOutputTopic(output); - app.setStreamsConfig(Map.of( - 
ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "10000" - )); - app.run(); - delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); - assertThat(this.kafkaCluster.read(ReadKeyValues.from(output, String.class, TestRecord.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, SpecificAvroDeserializer.class) - .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, - this.schemaRegistryMockExtension.getUrl()) - .build())) - .hasSize(1) - .anySatisfy(kv -> { - assertThat(kv.getKey()).isEqualTo("foo"); - assertThat(kv.getValue().getContent()).isEqualTo("bar"); - }); - final SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient(); - - assertThat(client.getAllSubjects()) - .contains(app.getOutputTopic() + "-value"); - app.setCleanUp(true); - app.run(); - delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); - assertThat(client.getAllSubjects()) - .doesNotContain(app.getOutputTopic() + "-value"); - assertThat(this.kafkaCluster.exists(app.getOutputTopic())) - .as("Output topic is deleted") - .isFalse(); - } -} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/integration/RunStreamsAppTest.java b/streams-bootstrap/src/test/java/com/bakdata/kafka/integration/RunStreamsAppTest.java deleted file mode 100644 index 6ffa1100..00000000 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/integration/RunStreamsAppTest.java +++ /dev/null @@ -1,275 +0,0 @@ -/* - * MIT License - * - * Copyright (c) 2023 bakdata - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -package com.bakdata.kafka.integration; - -import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith; -import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig; -import static net.mguenther.kafka.junit.Wait.delay; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.bakdata.kafka.CloseFlagApp; -import com.bakdata.kafka.KafkaStreamsApplication; -import com.bakdata.kafka.test_applications.ExtraInputTopics; -import com.bakdata.kafka.test_applications.Mirror; -import com.google.common.collect.ImmutableMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import lombok.Getter; -import lombok.RequiredArgsConstructor; -import net.mguenther.kafka.junit.EmbeddedKafkaCluster; -import net.mguenther.kafka.junit.KeyValue; -import net.mguenther.kafka.junit.ReadKeyValues; -import net.mguenther.kafka.junit.SendKeyValuesTransactional; -import net.mguenther.kafka.junit.TopicConfig; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.apache.kafka.common.serialization.StringSerializer; -import org.apache.kafka.streams.KafkaStreams.State; -import org.apache.kafka.streams.KafkaStreams.StateListener; -import org.apache.kafka.streams.StreamsBuilder; -import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; -import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse; -import org.apache.kafka.streams.kstream.KStream; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class RunStreamsAppTest { - private static final int TIMEOUT_SECONDS = 10; - private EmbeddedKafkaCluster kafkaCluster; - private KafkaStreamsApplication app = null; - @Mock - private StreamsUncaughtExceptionHandler uncaughtExceptionHandler; - @Mock - private StateListener stateListener; - - @BeforeEach - void setup() { - this.kafkaCluster = provisionWith(defaultClusterConfig()); - this.kafkaCluster.start(); - } - - @AfterEach - void teardown() throws InterruptedException { - if (this.app != null) { - this.app.close(); - this.app = null; - } - - delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); - this.kafkaCluster.stop(); - } - - @Test - void shouldRunApp() throws InterruptedException { - final String input = "input"; - final String output = "output"; - this.kafkaCluster.createTopic(TopicConfig.withName(input).useDefaults()); - this.kafkaCluster.createTopic(TopicConfig.withName(output).useDefaults()); - this.setupApp(new Mirror()); - this.app.setInputTopics(List.of(input)); - this.app.setOutputTopic(output); - this.runApp(); - final SendKeyValuesTransactional kvSendKeyValuesTransactionalBuilder = - SendKeyValuesTransactional.inTransaction(input, List.of(new KeyValue<>("foo", "bar"))) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class) - .build(); - this.kafkaCluster.send(kvSendKeyValuesTransactionalBuilder); - delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); - 
assertThat(this.kafkaCluster.read(ReadKeyValues.from(output, String.class, String.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .build())) - .hasSize(1); - } - - @Test - void shouldUseMultipleExtraInputTopics() throws InterruptedException { - final String input1 = "input1"; - final String input2 = "input2"; - final String output = "output"; - this.kafkaCluster.createTopic(TopicConfig.withName(input1).useDefaults()); - this.kafkaCluster.createTopic(TopicConfig.withName(input2).useDefaults()); - this.kafkaCluster.createTopic(TopicConfig.withName(output).useDefaults()); - this.setupApp(new ExtraInputTopics()); - this.app.setExtraInputTopics(Map.of("role", List.of(input1, input2))); - this.app.setOutputTopic(output); - this.runApp(); - this.kafkaCluster.send(SendKeyValuesTransactional.inTransaction(input1, List.of(new KeyValue<>("foo", "bar"))) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class) - .build()); - this.kafkaCluster.send(SendKeyValuesTransactional.inTransaction(input2, List.of(new KeyValue<>("foo", "baz"))) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class) - .build()); - delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); - assertThat(this.kafkaCluster.read(ReadKeyValues.from(output, String.class, String.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .build())) - .hasSize(2); - } - - @Test - void shouldCallCloseResourcesOnMissingInputTopic() throws InterruptedException { - final String input = "input"; - final String output = "output"; - this.kafkaCluster.createTopic(TopicConfig.withName(output).useDefaults()); - final CloseResourcesApplication closeResourcesApplication = new CloseResourcesApplication(); - this.setupApp(closeResourcesApplication); - this.app.setInputTopics(List.of(input)); - this.app.setOutputTopic(output); - when(this.uncaughtExceptionHandler.handle(any())).thenReturn(StreamThreadExceptionResponse.SHUTDOWN_CLIENT); - this.runApp(); - delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); - assertThat(closeResourcesApplication.getResourcesClosed()).isEqualTo(1); - verify(this.uncaughtExceptionHandler).handle(any()); - verify(this.stateListener).onChange(State.ERROR, State.PENDING_ERROR); - } - - @Test - void shouldCallCloseResourcesOnMapError() throws InterruptedException { - final String input = "input"; - final String output = "output"; - this.kafkaCluster.createTopic(TopicConfig.withName(input).useDefaults()); - this.kafkaCluster.createTopic(TopicConfig.withName(output).useDefaults()); - final CloseResourcesApplication closeResourcesApplication = new CloseResourcesApplication(); - this.setupApp(closeResourcesApplication); - this.app.setInputTopics(List.of(input)); - this.app.setOutputTopic(output); - when(this.uncaughtExceptionHandler.handle(any())).thenReturn(StreamThreadExceptionResponse.SHUTDOWN_CLIENT); - this.runApp(); - final SendKeyValuesTransactional kvSendKeyValuesTransactionalBuilder = - SendKeyValuesTransactional.inTransaction(input, List.of(new KeyValue<>("foo", "bar"))) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class) - 
.build(); - this.kafkaCluster.send(kvSendKeyValuesTransactionalBuilder); - delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); - assertThat(closeResourcesApplication.getResourcesClosed()).isEqualTo(1); - verify(this.uncaughtExceptionHandler).handle(any()); - verify(this.stateListener).onChange(State.ERROR, State.PENDING_ERROR); - } - - @Test - void shouldLeaveGroup() throws InterruptedException { - final CloseFlagApp closeApplication = this.createCloseFlagApp(); - this.runApp(); - delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); - this.app.close(); - assertThat(closeApplication.getLeaveGroup()).isTrue(); - } - - @Test - void shouldNotLeaveGroup() throws InterruptedException { - final CloseFlagApp closeApplication = this.createCloseFlagApp(); - this.app.setStreamsConfig(ImmutableMap.builder() - .putAll(this.app.getStreamsConfig()) - .put(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "foo") - .build()); - this.runApp(); - delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); - this.app.close(); - assertThat(closeApplication.getLeaveGroup()).isFalse(); - } - - @Test - void shouldLeaveGroupWithVolatileGroupId() throws InterruptedException { - final CloseFlagApp closeApplication = this.createCloseFlagApp(); - this.app.setStreamsConfig(ImmutableMap.builder() - .putAll(this.app.getStreamsConfig()) - .put(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "foo") - .build()); - this.app.setVolatileGroupInstanceId(true); - this.runApp(); - delay(TIMEOUT_SECONDS, TimeUnit.SECONDS); - this.app.close(); - assertThat(closeApplication.getLeaveGroup()).isTrue(); - } - - private CloseFlagApp createCloseFlagApp() { - final CloseFlagApp closeApplication = new CloseFlagApp(); - this.setupApp(closeApplication); - this.app.setInputTopics(List.of("input")); - this.app.setOutputTopic("output"); - return closeApplication; - } - - private void setupApp(final KafkaStreamsApplication application) { - this.app = application; - this.app.setBrokers(this.kafkaCluster.getBrokerList()); - this.app.setStreamsConfig(Map.of( - ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "10000" - )); - } - - private void runApp() { - // run in Thread because the application blocks indefinitely - new Thread(this.app).start(); - } - - @Getter - @RequiredArgsConstructor - private class CloseResourcesApplication extends KafkaStreamsApplication { - private int resourcesClosed = 0; - - @Override - public void buildTopology(final StreamsBuilder builder) { - final KStream input = builder.stream(this.getInputTopics()); - input.map((k, v) -> {throw new RuntimeException();}).to(this.getOutputTopic()); - } - - @Override - public String getUniqueAppId() { - return this.getClass().getSimpleName() + "-" + this.getOutputTopic(); - } - - @Override - protected void closeResources() { - this.resourcesClosed++; - } - - @Override - protected StreamsUncaughtExceptionHandler getUncaughtExceptionHandler() { - return RunStreamsAppTest.this.uncaughtExceptionHandler; - } - - @Override - protected StateListener getStateListener() { - return RunStreamsAppTest.this.stateListener; - } - } -} diff --git a/streams-bootstrap/src/test/java/com/bakdata/kafka/integration/StreamsCleanUpTest.java b/streams-bootstrap/src/test/java/com/bakdata/kafka/integration/StreamsCleanUpTest.java deleted file mode 100644 index 8e21f9eb..00000000 --- a/streams-bootstrap/src/test/java/com/bakdata/kafka/integration/StreamsCleanUpTest.java +++ /dev/null @@ -1,654 +0,0 @@ -/* - * MIT License - * - * Copyright (c) 2023 bakdata - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated 
documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package com.bakdata.kafka.integration; - - -import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith; -import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig; -import static net.mguenther.kafka.junit.Wait.delay; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; - -import com.bakdata.kafka.CleanUpException; -import com.bakdata.kafka.CleanUpRunner; -import com.bakdata.kafka.CloseFlagApp; -import com.bakdata.kafka.KafkaStreamsApplication; -import com.bakdata.kafka.TestRecord; -import com.bakdata.kafka.test_applications.ComplexTopologyApplication; -import com.bakdata.kafka.test_applications.MirrorKeyWithAvro; -import com.bakdata.kafka.test_applications.MirrorValueWithAvro; -import com.bakdata.kafka.test_applications.WordCount; -import com.bakdata.kafka.test_applications.WordCountPattern; -import com.bakdata.kafka.util.ImprovedAdminClient; -import com.bakdata.schemaregistrymock.junit5.SchemaRegistryMockExtension; -import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; -import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; -import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; -import io.confluent.kafka.serializers.KafkaAvroSerializer; -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.function.Consumer; -import java.util.regex.Pattern; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; -import net.mguenther.kafka.junit.EmbeddedKafkaCluster; -import net.mguenther.kafka.junit.KeyValue; -import net.mguenther.kafka.junit.ReadKeyValues; -import net.mguenther.kafka.junit.SendKeyValuesTransactional; -import net.mguenther.kafka.junit.SendValuesTransactional; -import net.mguenther.kafka.junit.TopicConfig; -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.ConsumerGroupListing; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.common.serialization.LongDeserializer; -import org.apache.kafka.common.serialization.StringSerializer; -import org.apache.kafka.streams.StreamsConfig; -import org.assertj.core.api.SoftAssertions; -import 
-import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.junit.jupiter.api.extension.RegisterExtension;
-import org.junitpioneer.jupiter.SetEnvironmentVariable;
-import org.mockito.Mock;
-import org.mockito.junit.jupiter.MockitoExtension;
-import org.mockito.junit.jupiter.MockitoSettings;
-import org.mockito.quality.Strictness;
-
-@Slf4j
-@ExtendWith(SoftAssertionsExtension.class)
-@ExtendWith(MockitoExtension.class)
-@MockitoSettings(strictness = Strictness.STRICT_STUBS)
-class StreamsCleanUpTest {
-    private static final int TIMEOUT_SECONDS = 10;
-    @RegisterExtension
-    final SchemaRegistryMockExtension schemaRegistryMockExtension = new SchemaRegistryMockExtension();
-    private EmbeddedKafkaCluster kafkaCluster;
-    private KafkaStreamsApplication app = null;
-    @InjectSoftAssertions
-    private SoftAssertions softly;
-    @Mock
-    private Consumer<String> topicCleanUpHook;
-
-    @BeforeEach
-    void setup() throws InterruptedException {
-        this.kafkaCluster = provisionWith(defaultClusterConfig());
-        this.kafkaCluster.start();
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-    }
-
-    @AfterEach
-    void teardown() throws InterruptedException {
-        if (this.app != null) {
-            this.app.close();
-            this.app.getStreams().cleanUp();
-            this.app = null;
-        }
-
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.kafkaCluster.stop();
-    }
-
-    @Test
-    void shouldDeleteTopic() throws InterruptedException {
-        this.app = this.createWordCountApplication();
-        final SendValuesTransactional<String> sendRequest = SendValuesTransactional
-                .inTransaction(this.app.getInputTopic(), List.of("blub", "bla", "blub"))
-                .useDefaults();
-        this.kafkaCluster.send(sendRequest);
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-
-        final List<KeyValue<String, Long>> expectedValues =
-                List.of(new KeyValue<>("blub", 1L),
-                        new KeyValue<>("bla", 1L),
-                        new KeyValue<>("blub", 2L)
-                );
-
-        this.runAndAssertContent(expectedValues, "WordCount contains all elements after first run");
-
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.runCleanUpWithDeletion();
-
-        this.softly.assertThat(this.kafkaCluster.exists(this.app.getOutputTopic()))
-                .as("Output topic is deleted")
-                .isFalse();
-
-        this.softly.assertThat(this.kafkaCluster.exists(this.app.getErrorTopic()))
-                .as("Error topic is deleted")
-                .isFalse();
-    }
-
-    @Test
-    void shouldDeleteConsumerGroup() throws InterruptedException {
-        this.app = this.createWordCountApplication();
-        final SendValuesTransactional<String> sendRequest = SendValuesTransactional
-                .inTransaction(this.app.getInputTopic(), List.of("blub", "bla", "blub"))
-                .useDefaults();
-        this.kafkaCluster.send(sendRequest);
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-
-        final List<KeyValue<String, Long>> expectedValues =
-                List.of(new KeyValue<>("blub", 1L),
-                        new KeyValue<>("bla", 1L),
-                        new KeyValue<>("blub", 2L)
-                );
-
-        this.runAndAssertContent(expectedValues, "WordCount contains all elements after first run");
-
-        try (final AdminClient adminClient = AdminClient.create(this.app.getKafkaProperties())) {
-            this.softly.assertThat(adminClient.listConsumerGroups().all().get(TIMEOUT_SECONDS, TimeUnit.SECONDS))
-                    .extracting(ConsumerGroupListing::groupId)
-                    .as("Consumer group exists")
-                    .contains(this.app.getUniqueAppId());
-        } catch (final TimeoutException | ExecutionException e) {
-            throw new RuntimeException("Error retrieving consumer groups", e);
-        }
-
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.runCleanUpWithDeletion();
-
-        try (final AdminClient adminClient = AdminClient.create(this.app.getKafkaProperties())) {
-            this.softly.assertThat(adminClient.listConsumerGroups().all().get(TIMEOUT_SECONDS, TimeUnit.SECONDS))
-                    .extracting(ConsumerGroupListing::groupId)
-                    .as("Consumer group is deleted")
-                    .doesNotContain(this.app.getUniqueAppId());
-        } catch (final TimeoutException | ExecutionException e) {
-            throw new RuntimeException("Error retrieving consumer groups", e);
-        }
-    }
-
-    @Test
-    void shouldNotThrowAnErrorIfConsumerGroupDoesNotExist() throws InterruptedException {
-        this.app = this.createWordCountApplication();
-        final SendValuesTransactional<String> sendRequest = SendValuesTransactional
-                .inTransaction(this.app.getInputTopic(), List.of("blub", "bla", "blub"))
-                .useDefaults();
-        this.kafkaCluster.send(sendRequest);
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-
-        final List<KeyValue<String, Long>> expectedValues =
-                List.of(new KeyValue<>("blub", 1L),
-                        new KeyValue<>("bla", 1L),
-                        new KeyValue<>("blub", 2L)
-                );
-
-        this.runAndAssertContent(expectedValues, "WordCount contains all elements after first run");
-
-        try (final AdminClient adminClient = AdminClient.create(this.app.getKafkaProperties())) {
-            this.softly.assertThat(adminClient.listConsumerGroups().all().get(TIMEOUT_SECONDS, TimeUnit.SECONDS))
-                    .extracting(ConsumerGroupListing::groupId)
-                    .as("Consumer group exists")
-                    .contains(this.app.getUniqueAppId());
-        } catch (final TimeoutException | ExecutionException e) {
-            throw new RuntimeException("Error retrieving consumer groups", e);
-        }
-
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-
-        try (final AdminClient adminClient = AdminClient.create(this.app.getKafkaProperties())) {
-            adminClient.deleteConsumerGroups(List.of(this.app.getUniqueAppId())).all()
-                    .get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-            this.softly.assertThat(adminClient.listConsumerGroups().all().get(TIMEOUT_SECONDS, TimeUnit.SECONDS))
-                    .extracting(ConsumerGroupListing::groupId)
-                    .as("Consumer group is deleted")
-                    .doesNotContain(this.app.getUniqueAppId());
-        } catch (final TimeoutException | ExecutionException e) {
-            throw new RuntimeException("Error deleting consumer group", e);
-        }
-        this.softly.assertThatCode(this::runCleanUpWithDeletion).doesNotThrowAnyException();
-    }
-
-    @Test
-    void shouldDeleteInternalTopics() throws InterruptedException {
-        this.app = this.createComplexApplication();
-
-        final String inputTopic = this.app.getInputTopic();
-        final String internalTopic =
-                this.app.getUniqueAppId() + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-repartition";
-        final String backingTopic =
-                this.app.getUniqueAppId() + "-KSTREAM-REDUCE-STATE-STORE-0000000003-changelog";
-        final String manualTopic = ComplexTopologyApplication.THROUGH_TOPIC;
-
-        final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build();
-        final SendKeyValuesTransactional<String, TestRecord> sendRequest = SendKeyValuesTransactional
-                .inTransaction(this.app.getInputTopic(), Collections.singletonList(new KeyValue<>("key 1", testRecord)))
-                .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
-                        this.schemaRegistryMockExtension.getUrl())
-                .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName())
-                .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName())
-                .build();
-
-        this.kafkaCluster.send(sendRequest);
-        this.runAppAndClose();
-
-        this.softly.assertThat(this.kafkaCluster.exists(inputTopic)).isTrue();
-        this.softly.assertThat(this.kafkaCluster.exists(internalTopic)).isTrue();
-        this.softly.assertThat(this.kafkaCluster.exists(backingTopic)).isTrue();
-        this.softly.assertThat(this.kafkaCluster.exists(manualTopic)).isTrue();
-
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.runCleanUp();
-
-        this.softly.assertThat(this.kafkaCluster.exists(inputTopic)).isTrue();
-        this.softly.assertThat(this.kafkaCluster.exists(manualTopic)).isTrue();
-
-        this.softly.assertThat(this.kafkaCluster.exists(internalTopic)).isFalse();
-        this.softly.assertThat(this.kafkaCluster.exists(backingTopic)).isFalse();
-    }
-
-
-    @Test
-    void shouldDeleteIntermediateTopics() throws InterruptedException {
-        this.app = this.createComplexApplication();
-
-        final String manualTopic = ComplexTopologyApplication.THROUGH_TOPIC;
-
-        final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build();
-        final SendKeyValuesTransactional<String, TestRecord> sendRequest = SendKeyValuesTransactional
-                .inTransaction(this.app.getInputTopic(), Collections.singletonList(new KeyValue<>("key 1", testRecord)))
-                .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
-                        this.schemaRegistryMockExtension.getUrl())
-                .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName())
-                .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName())
-                .build();
-
-        this.kafkaCluster.send(sendRequest);
-        this.runAppAndClose();
-
-        this.softly.assertThat(this.kafkaCluster.exists(manualTopic)).isTrue();
-
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.runCleanUpWithDeletion();
-
-        this.softly.assertThat(this.kafkaCluster.exists(manualTopic)).isFalse();
-    }
-
-    @Test
-    void shouldDeleteState() throws InterruptedException {
-        this.app = this.createWordCountApplication();
-        final SendValuesTransactional<String> sendRequest = SendValuesTransactional
-                .inTransaction(this.app.getInputTopic(), List.of("blub", "bla", "blub"))
-                .useDefaults();
-        this.kafkaCluster.send(sendRequest);
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-
-        final List<KeyValue<String, Long>> expectedValues = List.of(
-                new KeyValue<>("blub", 1L),
-                new KeyValue<>("bla", 1L),
-                new KeyValue<>("blub", 2L)
-        );
-
-        this.runAndAssertContent(expectedValues, "All entries are once in the input topic after the 1st run");
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.runCleanUp();
-
-        final List<KeyValue<String, Long>> entriesTwice = expectedValues.stream()
-                .flatMap(entry -> Stream.of(entry, entry))
-                .collect(Collectors.toList());
-        this.runAndAssertContent(entriesTwice, "All entries are twice in the input topic after the 2nd run");
-    }
-
-    @Test
-    void shouldReprocessAlreadySeenRecords() throws InterruptedException {
-        this.app = this.createWordCountApplication();
-        final SendValuesTransactional<String> sendRequest =
-                SendValuesTransactional.inTransaction(this.app.getInputTopic(),
-                        Arrays.asList("a", "b", "c")).useDefaults();
-        this.kafkaCluster.send(sendRequest);
-
-        this.runAndAssertSize(3);
-        this.runAndAssertSize(3);
-
-        // Wait until all stream application are completely stopped before triggering cleanup
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.runCleanUp();
-        this.runAndAssertSize(6);
-    }
-
-    @Test
-    void shouldReprocessAlreadySeenRecordsWithPattern() throws InterruptedException {
-        this.app = this.createWordCountPatternApplication();
-        this.kafkaCluster.send(SendValuesTransactional.inTransaction("input_topic",
-                Arrays.asList("a", "b")).useDefaults());
-        this.kafkaCluster.send(SendValuesTransactional.inTransaction("another_topic",
-                List.of("c")).useDefaults());
-
-        this.runAndAssertSize(3);
-        this.runAndAssertSize(3);
-
-        // Wait until all stream application are completely stopped before triggering cleanup
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.runCleanUp();
-        this.runAndAssertSize(6);
-    }
-
-    @Test
-    void shouldDeleteValueSchema()
-            throws InterruptedException, IOException, RestClientException {
-        this.app = this.createMirrorValueApplication();
-        final SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient();
-        final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build();
-        final SendValuesTransactional<TestRecord> sendRequest = SendValuesTransactional
-                .inTransaction(this.app.getInputTopic(), Collections.singletonList(testRecord))
-                .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
-                        this.schemaRegistryMockExtension.getUrl())
-                .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName())
-                .build();
-
-        this.kafkaCluster.send(sendRequest);
-        this.runAppAndClose();
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.softly.assertThat(client.getAllSubjects())
-                .contains(this.app.getOutputTopic() + "-value", this.app.getInputTopic() + "-value");
-        this.runCleanUpWithDeletion();
-        this.softly.assertThat(client.getAllSubjects())
-                .doesNotContain(this.app.getOutputTopic() + "-value")
-                .contains(this.app.getInputTopic() + "-value");
-    }
-
-    @Test
-    void shouldDeleteKeySchema()
-            throws InterruptedException, IOException, RestClientException {
-        this.app = this.createMirrorKeyApplication();
-        final SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient();
-        final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build();
-        final SendKeyValuesTransactional<TestRecord, String> sendRequest = SendKeyValuesTransactional
-                .inTransaction(this.app.getInputTopic(), Collections.singletonList(new KeyValue<>(testRecord, "val")))
-                .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
-                        this.schemaRegistryMockExtension.getUrl())
-                .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName())
-                .build();
-
-        this.kafkaCluster.send(sendRequest);
-        this.runAppAndClose();
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.softly.assertThat(client.getAllSubjects())
-                .contains(this.app.getOutputTopic() + "-key", this.app.getInputTopic() + "-key");
-        this.runCleanUpWithDeletion();
-        this.softly.assertThat(client.getAllSubjects())
-                .doesNotContain(this.app.getOutputTopic() + "-key")
-                .contains(this.app.getInputTopic() + "-key");
-    }
-
-    @Test
-    void shouldDeleteSchemaOfInternalTopics()
-            throws InterruptedException, IOException, RestClientException {
-        this.app = this.createComplexApplication();
-
-        final String inputSubject = this.app.getInputTopic() + "-value";
-        final String internalSubject =
-                this.app.getUniqueAppId() + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-repartition" + "-value";
-        final String backingSubject =
-                this.app.getUniqueAppId() + "-KSTREAM-REDUCE-STATE-STORE-0000000003-changelog" + "-value";
-        final String manualSubject = ComplexTopologyApplication.THROUGH_TOPIC + "-value";
-
-        final SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient();
-        final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build();
-        final SendKeyValuesTransactional<String, TestRecord> sendRequest = SendKeyValuesTransactional
-                .inTransaction(this.app.getInputTopic(), Collections.singletonList(new KeyValue<>("key 1", testRecord)))
-                .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
-                        this.schemaRegistryMockExtension.getUrl())
-                .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName())
-                .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName())
-                .build();
-
-        this.kafkaCluster.send(sendRequest);
-        this.runAppAndClose();
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.softly.assertThat(client.getAllSubjects())
-                .contains(inputSubject, internalSubject, backingSubject, manualSubject);
-
-        this.runCleanUp();
-
-        this.softly.assertThat(client.getAllSubjects())
-                .doesNotContain(internalSubject, backingSubject)
-                .contains(inputSubject, manualSubject);
-    }
-
-
-    @Test
-    void shouldDeleteSchemaOfIntermediateTopics()
-            throws InterruptedException, IOException, RestClientException {
-        this.app = this.createComplexApplication();
-
-        final String manualSubject = ComplexTopologyApplication.THROUGH_TOPIC + "-value";
-
-        final SchemaRegistryClient client = this.schemaRegistryMockExtension.getSchemaRegistryClient();
-        final TestRecord testRecord = TestRecord.newBuilder().setContent("key 1").build();
-        final SendKeyValuesTransactional<String, TestRecord> sendRequest = SendKeyValuesTransactional
-                .inTransaction(this.app.getInputTopic(), Collections.singletonList(new KeyValue<>("key 1", testRecord)))
-                .with(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
-                        this.schemaRegistryMockExtension.getUrl())
-                .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName())
-                .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName())
-                .build();
-
-        this.kafkaCluster.send(sendRequest);
-        this.runAppAndClose();
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.softly.assertThat(client.getAllSubjects()).contains(manualSubject);
-        this.runCleanUpWithDeletion();
-        this.softly.assertThat(client.getAllSubjects()).doesNotContain(manualSubject);
-    }
-
-    @Test
-    void shouldCallCleanupHookForInternalTopics() {
-        this.app = this.createComplexCleanUpHookApplication();
-
-        this.runCleanUp();
-        final String uniqueAppId = this.app.getUniqueAppId();
-        verify(this.topicCleanUpHook).accept(uniqueAppId + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-repartition");
-        verify(this.topicCleanUpHook).accept(uniqueAppId + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-changelog");
-        verify(this.topicCleanUpHook).accept(uniqueAppId + "-KSTREAM-REDUCE-STATE-STORE-0000000003-changelog");
-        verifyNoMoreInteractions(this.topicCleanUpHook);
-    }
-
-    @Test
-    void shouldCallCleanUpHookForAllTopics() {
-        this.app = this.createComplexCleanUpHookApplication();
-
-        this.runCleanUpWithDeletion();
-        final String uniqueAppId = this.app.getUniqueAppId();
-        verify(this.topicCleanUpHook).accept(uniqueAppId + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-repartition");
-        verify(this.topicCleanUpHook).accept(uniqueAppId + "-KSTREAM-AGGREGATE-STATE-STORE-0000000008-changelog");
-        verify(this.topicCleanUpHook).accept(uniqueAppId + "-KSTREAM-REDUCE-STATE-STORE-0000000003-changelog");
-        verify(this.topicCleanUpHook).accept(ComplexTopologyApplication.THROUGH_TOPIC);
-        verify(this.topicCleanUpHook).accept(this.app.getOutputTopic());
-        verifyNoMoreInteractions(this.topicCleanUpHook);
-    }
-
-    @Test
-    void shouldCallClose() throws InterruptedException {
-        final CloseFlagApp closeApplication = this.createCloseApplication();
-        this.app = closeApplication;
-        this.kafkaCluster.createTopic(TopicConfig.withName(this.app.getInputTopic()).useDefaults());
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        // if we don't run the app, the coordinator will be unavailable
-        this.runAppAndClose();
-        closeApplication.setClosed(false);
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.runCleanUpWithDeletion();
-        this.softly.assertThat(closeApplication.isClosed()).isTrue();
-    }
-
-    @Test
-    @SetEnvironmentVariable(key = "STREAMS_FOO_BAR", value = "baz")
-    void shouldConfigureAdminClient() {
-        final CloseFlagApp closeApplication = this.createCloseApplication();
-        final ImprovedAdminClient adminClient = closeApplication.createAdminClient();
-        final Properties properties = adminClient.getProperties();
-        this.softly.assertThat(properties.getProperty("foo.bar")).isEqualTo("baz");
-    }
-
-    @Test
-    void shouldNotThrowExceptionOnMissingInputTopic() throws InterruptedException {
-        this.app = this.createMirrorKeyApplication();
-        // if we don't run the app, the coordinator will be unavailable
-        this.runAppAndClose();
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-        this.softly.assertThatCode(this::runCleanUpWithDeletion).doesNotThrowAnyException();
-    }
-
-    @Test
-    void shouldThrowExceptionOnResetterError() throws InterruptedException {
-        this.app = this.createMirrorKeyApplication();
-        this.kafkaCluster.createTopic(TopicConfig.withName(this.app.getInputTopic()).useDefaults());
-        this.runApp();
-        //should throw exception because consumer group is still active
-        this.softly.assertThatThrownBy(this::runCleanUpWithDeletion)
-                .isInstanceOf(CleanUpException.class)
-                .hasMessageContaining("Error running streams resetter. Exit code 1");
-        this.app.close();
-    }
-
-    private List<KeyValue<String, Long>> readOutputTopic(final String outputTopic) throws InterruptedException {
-        final ReadKeyValues<String, Long> readRequest = ReadKeyValues.from(outputTopic, Long.class)
-                .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class).build();
-        return this.kafkaCluster.read(readRequest);
-    }
-
-    private void runCleanUp() {
-        this.app.setCleanUp(true);
-        this.app.run();
-        this.app.setCleanUp(false);
-    }
-
-    private void runCleanUpWithDeletion() {
-        this.app.setDeleteOutputTopic(true);
-        this.runCleanUp();
-        this.app.setDeleteOutputTopic(false);
-    }
-
-    private void runAndAssertContent(final Iterable<KeyValue<String, Long>> expectedValues,
-            final String description)
-            throws InterruptedException {
-        this.runAppAndClose();
-
-        final List<KeyValue<String, Long>> output = this.readOutputTopic(this.app.getOutputTopic());
-        this.softly.assertThat(output)
-                .as(description)
-                .containsExactlyInAnyOrderElementsOf(expectedValues);
-    }
-
-    private void runAndAssertSize(final int expectedMessageCount)
-            throws InterruptedException {
-        this.runAppAndClose();
-        final List<KeyValue<String, Long>> records = this.readOutputTopic(this.app.getOutputTopic());
-        this.softly.assertThat(records).hasSize(expectedMessageCount);
-    }
-
-    private void runApp() throws InterruptedException {
-        // run in Thread because the application blocks indefinitely
-        new Thread(this.app).start();
-        // Wait until stream application has consumed all data
-        delay(TIMEOUT_SECONDS, TimeUnit.SECONDS);
-    }
-
-    private void runAppAndClose() throws InterruptedException {
-        this.runApp();
-        this.app.close();
-    }
-
-    private KafkaStreamsApplication createWordCountApplication() {
-        return this.setupAppNoSr(new WordCount(), "word_input", "word_output", "word_error");
-    }
-
-    private KafkaStreamsApplication createWordCountPatternApplication() {
-        return this.setupAppNoSr(new WordCountPattern(), Pattern.compile(".*_topic"), "word_output", "word_error");
-    }
-
-    private KafkaStreamsApplication createMirrorValueApplication() {
-        return this.setupAppWithSr(new MirrorValueWithAvro(), "input", "output", "key_error");
MirrorValueWithAvro(), "input", "output", "key_error"); - } - - private CloseFlagApp createCloseApplication() { - return this.setupAppWithSr(new CloseFlagApp(), "input", "output", "key_error"); - } - - private KafkaStreamsApplication createMirrorKeyApplication() { - return this.setupAppWithSr(new MirrorKeyWithAvro(), "input", "output", "value_error"); - } - - private KafkaStreamsApplication createComplexApplication() { - this.kafkaCluster.createTopic(TopicConfig.withName(ComplexTopologyApplication.THROUGH_TOPIC).useDefaults()); - return this.setupAppWithSr(new ComplexTopologyApplication(), "input", "output", "value_error"); - } - - private KafkaStreamsApplication createComplexCleanUpHookApplication() { - this.kafkaCluster.createTopic(TopicConfig.withName(ComplexTopologyApplication.THROUGH_TOPIC).useDefaults()); - return this.setupAppWithSr(new ComplexTopologyApplication() { - @Override - protected void cleanUpRun(final CleanUpRunner cleanUpRunner) { - cleanUpRunner.registerTopicCleanUpHook(StreamsCleanUpTest.this.topicCleanUpHook); - super.cleanUpRun(cleanUpRunner); - } - }, "input", "output", "value_error"); - } - - private T setupAppWithSr(final T application, final String inputTopicName, - final String outputTopicName, final String errorTopicName) { - this.setupApp(application, outputTopicName, errorTopicName); - application.setSchemaRegistryUrl(this.schemaRegistryMockExtension.getUrl()); - application.setInputTopics(List.of(inputTopicName)); - return application; - } - - private T setupAppNoSr(final T application, final String inputTopicName, - final String outputTopicName, final String errorTopicName) { - this.setupApp(application, outputTopicName, errorTopicName); - application.setInputTopics(List.of(inputTopicName)); - return application; - } - - private T setupAppNoSr(final T application, final Pattern inputPattern, - final String outputTopicName, final String errorTopicName) { - this.setupApp(application, outputTopicName, errorTopicName); - application.setInputPattern(inputPattern); - return application; - } - - private void setupApp(final T application, final String outputTopicName, - final String errorTopicName) { - application.setOutputTopic(outputTopicName); - application.setErrorTopic(errorTopicName); - application.setBrokers(this.kafkaCluster.getBrokerList()); - application.setProductive(false); - application.setStreamsConfig(Map.of( - StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, "0", - ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "10000" - )); - } -} diff --git a/streams-bootstrap/src/test/resources/log4j2.xml b/streams-bootstrap/src/test/resources/log4j2.xml deleted file mode 100644 index a42d6e75..00000000 --- a/streams-bootstrap/src/test/resources/log4j2.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - - - - - - - -