diff --git a/.github/workflows/build-and-publish.yaml b/.github/workflows/build-and-publish.yaml
index fefd208..be8f98b 100644
--- a/.github/workflows/build-and-publish.yaml
+++ b/.github/workflows/build-and-publish.yaml
@@ -9,6 +9,8 @@ jobs:
   build-and-publish:
     name: Java Gradle
     uses: bakdata/ci-templates/.github/workflows/java-gradle-library.yaml@1.39.1
+    with:
+      java-version: 17
     secrets:
       sonar-token: ${{ secrets.SONARCLOUD_TOKEN }}
       sonar-organization: ${{ secrets.SONARCLOUD_ORGANIZATION }}
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 6258a2b..2f4ea28 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -18,6 +18,7 @@ jobs:
     name: Java Gradle
     uses: bakdata/ci-templates/.github/workflows/java-gradle-release.yaml@1.39.1
     with:
+      java-version: 17
       release-type: "${{ inputs.release-type }}"
     secrets:
       github-email: "${{ secrets.GH_EMAIL }}"
diff --git a/brute-force-connect/build.gradle.kts b/brute-force-connect/build.gradle.kts
index 9caa5f3..a82a0b2 100644
--- a/brute-force-connect/build.gradle.kts
+++ b/brute-force-connect/build.gradle.kts
@@ -23,11 +23,10 @@ dependencies {
     testImplementation(group = "io.confluent", name = "kafka-streams-protobuf-serde", version = confluentVersion)
     testImplementation(group = "io.confluent", name = "kafka-streams-json-schema-serde", version = confluentVersion)

-    testImplementation(group = "com.adobe.testing", name = "s3mock-junit5", version = "2.1.8") {
-        exclude(group = "ch.qos.logback")
-        exclude(group = "org.apache.logging.log4j", module = "log4j-to-slf4j")
-    }
-    val fluentKafkaVersion = "2.5.1"
+    val testContainersVersion: String by project
+    testImplementation(group = "org.testcontainers", name = "junit-jupiter", version = testContainersVersion)
+    testImplementation(group = "org.testcontainers", name = "localstack", version = testContainersVersion)
+    val fluentKafkaVersion = "2.11.3"
     testImplementation(
         group = "com.bakdata.fluent-kafka-streams-tests",
         name = "schema-registry-mock-junit5",
@@ -35,7 +34,7 @@ dependencies {
     )
     testImplementation(group = "com.bakdata.kafka", name = "large-message-serde", version = largeMessageVersion)

-    testImplementation(group = "net.mguenther.kafka", name = "kafka-junit", version = kafkaVersion) {
+    testImplementation(group = "net.mguenther.kafka", name = "kafka-junit", version = "3.5.0") {
         exclude(group = "org.slf4j", module = "slf4j-log4j12")
     }
     testImplementation(group = "org.apache.kafka", name = "connect-file", version = kafkaVersion)
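Note: the S3Mock JUnit 5 extension is dropped here (and in brute-force-serde below) in favor of Testcontainers' LocalStack module. The `junit-jupiter` artifact provides the `@Testcontainers`/`@Container` lifecycle integration, `localstack` provides the container itself, and `testContainersVersion` is resolved from `gradle.properties`. A minimal sketch of the wiring these two dependencies enable — class name and usage are illustrative only; the actual fixtures follow in the test diffs below:

```java
// Minimal sketch, not part of this PR: what the new test dependencies enable.
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.localstack.LocalStackContainer;
import org.testcontainers.containers.localstack.LocalStackContainer.Service;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.utility.DockerImageName;

@Testcontainers // starts the container before the tests and stops it afterwards
class LocalStackWiringSketch {

    @Container // static field: one container shared by all tests in the class
    private static final LocalStackContainer LOCAL_STACK =
            new LocalStackContainer(DockerImageName.parse("localstack/localstack:1.3.1"))
                    .withServices(Service.S3);

    @Test
    void s3EndpointIsAvailable() {
        // endpoint, region, and credentials are provided by the running container
        System.out.println(LOCAL_STACK.getEndpointOverride(Service.S3));
    }
}
```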
diff --git a/brute-force-connect/src/test/java/com/bakdata/kafka/BruteForceConverterIntegrationTest.java b/brute-force-connect/src/test/java/com/bakdata/kafka/BruteForceConverterIntegrationTest.java
index ee58957..b874cc2 100644
--- a/brute-force-connect/src/test/java/com/bakdata/kafka/BruteForceConverterIntegrationTest.java
+++ b/brute-force-connect/src/test/java/com/bakdata/kafka/BruteForceConverterIntegrationTest.java
@@ -1,7 +1,7 @@
 /*
  * MIT License
  *
- * Copyright (c) 2022 bakdata
+ * Copyright (c) 2024 bakdata
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -28,11 +28,11 @@
 import static net.mguenther.kafka.junit.Wait.delay;
 import static org.assertj.core.api.Assertions.assertThat;

-import com.adobe.testing.s3mock.junit5.S3MockExtension;
 import com.bakdata.schemaregistrymock.junit5.SchemaRegistryMockExtension;
 import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
 import io.confluent.kafka.streams.serdes.avro.GenericAvroSerializer;
 import java.io.IOException;
+import java.net.URI;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Collections;
@@ -55,11 +55,47 @@
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.RegisterExtension;
-
+import org.testcontainers.containers.localstack.LocalStackContainer;
+import org.testcontainers.containers.localstack.LocalStackContainer.Service;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
+
+@Testcontainers
 class BruteForceConverterIntegrationTest {
-    @RegisterExtension
-    static final S3MockExtension S3_MOCK = S3MockExtension.builder().silent()
-            .withSecureConnection(false).build();
+
+    private static final DockerImageName LOCAL_STACK_IMAGE = DockerImageName.parse("localstack/localstack")
+            .withTag("1.3.1");
+    @Container
+    private static final LocalStackContainer LOCAL_STACK_CONTAINER = new LocalStackContainer(LOCAL_STACK_IMAGE)
+            .withServices(Service.S3);
+
+    static S3Client getS3Client() {
+        return S3Client.builder()
+                .endpointOverride(getEndpointOverride())
+                .credentialsProvider(StaticCredentialsProvider.create(getCredentials()))
+                .region(getRegion())
+                .build();
+    }
+
+    private static Region getRegion() {
+        return Region.of(LOCAL_STACK_CONTAINER.getRegion());
+    }
+
+    private static AwsBasicCredentials getCredentials() {
+        return AwsBasicCredentials.create(
+                LOCAL_STACK_CONTAINER.getAccessKey(), LOCAL_STACK_CONTAINER.getSecretKey()
+        );
+    }
+
+    private static URI getEndpointOverride() {
+        return LOCAL_STACK_CONTAINER.getEndpointOverride(Service.S3);
+    }
     private static final String BUCKET_NAME = "testbucket";
     private static final String TOPIC = "input";
     @RegisterExtension
@@ -69,12 +105,13 @@ class BruteForceConverterIntegrationTest {

     private static Properties createS3BackedProperties() {
         final Properties properties = new Properties();
-        properties.put(AbstractLargeMessageConfig.S3_ENDPOINT_CONFIG, "http://localhost:" + S3_MOCK.getHttpPort());
-        properties.put(AbstractLargeMessageConfig.S3_REGION_CONFIG, "us-east-1");
-        properties.put(AbstractLargeMessageConfig.S3_ACCESS_KEY_CONFIG, "foo");
-        properties.put(AbstractLargeMessageConfig.S3_SECRET_KEY_CONFIG, "bar");
-        properties.put(AbstractLargeMessageConfig.S3_ENABLE_PATH_STYLE_ACCESS_CONFIG, true);
-        properties.put(AbstractLargeMessageConfig.BASE_PATH_CONFIG, String.format("s3://%s/", BUCKET_NAME));
+        final AwsBasicCredentials credentials = getCredentials();
+        properties.setProperty(AbstractLargeMessageConfig.S3_ENDPOINT_CONFIG,
+                getEndpointOverride().toString());
+        properties.setProperty(AbstractLargeMessageConfig.S3_REGION_CONFIG, getRegion().id());
+        properties.setProperty(AbstractLargeMessageConfig.S3_ACCESS_KEY_CONFIG, credentials.accessKeyId());
+        properties.setProperty(AbstractLargeMessageConfig.S3_SECRET_KEY_CONFIG, credentials.secretAccessKey());
+        properties.setProperty(AbstractLargeMessageConfig.BASE_PATH_CONFIG, String.format("s3://%s/", BUCKET_NAME));
         return properties;
     }
@@ -85,7 +122,9 @@ private static String withValuePrefix(final Object config) {
     @BeforeEach
     void setUp() throws IOException {
         this.outputFile = Files.createTempFile("test", "temp");
-        S3_MOCK.createS3Client().createBucket(BUCKET_NAME);
+        getS3Client().createBucket(CreateBucketRequest.builder()
+                .bucket(BUCKET_NAME)
+                .build());
         this.kafkaCluster = this.createCluster();
         this.kafkaCluster.start();
     }
@@ -130,13 +169,13 @@ private EmbeddedKafkaCluster createCluster() {

     private Properties config() {
         final Properties properties = new Properties();
-        properties.put(ConnectorConfig.NAME_CONFIG, "test");
-        properties.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, "FileStreamSink");
-        properties.put(SinkConnector.TOPICS_CONFIG, TOPIC);
-        properties.put(FileStreamSinkConnector.FILE_CONFIG, this.outputFile.toString());
-        properties.put(ConnectorConfig.KEY_CONVERTER_CLASS_CONFIG, StringConverter.class.getName());
-        properties.put(ConnectorConfig.VALUE_CONVERTER_CLASS_CONFIG, BruteForceConverter.class.getName());
-        properties.put(withValuePrefix(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG),
+        properties.setProperty(ConnectorConfig.NAME_CONFIG, "test");
+        properties.setProperty(ConnectorConfig.CONNECTOR_CLASS_CONFIG, "FileStreamSink");
+        properties.setProperty(SinkConnector.TOPICS_CONFIG, TOPIC);
+        properties.setProperty(FileStreamSinkConnector.FILE_CONFIG, this.outputFile.toString());
+        properties.setProperty(ConnectorConfig.KEY_CONVERTER_CLASS_CONFIG, StringConverter.class.getName());
+        properties.setProperty(ConnectorConfig.VALUE_CONVERTER_CLASS_CONFIG, BruteForceConverter.class.getName());
+        properties.setProperty(withValuePrefix(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG),
                 this.schemaRegistry.getUrl());
         createS3BackedProperties().forEach((key, value) -> properties.put(withValuePrefix(key), value));
         return properties;
@@ -154,7 +193,7 @@ private Properties createBackedStringProducerProperties(final boolean shouldBack
     private Properties createBaseProducerProperties() {
         final Properties properties = new Properties();
         properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
-        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaCluster.getBrokerList());
+        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaCluster.getBrokerList());
         return properties;
     }

@@ -167,7 +206,7 @@ private Properties createStringProducerProperties() {
     private Properties createAvroProducerProperties() {
         final Properties properties = this.createBaseProducerProperties();
         properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, GenericAvroSerializer.class);
-        properties.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, this.schemaRegistry.getUrl());
+        properties.setProperty(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, this.schemaRegistry.getUrl());
         return properties;
     }
 }
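The fixture above builds a real AWS SDK v2 client against the container. A quick round-trip sanity check, assuming the `getS3Client()` helper from the diff above — the `probe` key, payload, and sketch class are made up for illustration:

```java
// Hypothetical sanity check, not part of this PR: verify the LocalStack S3
// endpoint accepts the AWS SDK v2 client built by getS3Client().
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;

class S3RoundTripSketch {
    static void roundTrip() {
        // same package as the test, so the package-private helper is visible
        final S3Client s3 = BruteForceConverterIntegrationTest.getS3Client();
        s3.createBucket(CreateBucketRequest.builder().bucket("testbucket").build());
        s3.putObject(PutObjectRequest.builder().bucket("testbucket").key("probe").build(),
                RequestBody.fromString("hello"));
        final String read = s3.getObjectAsBytes(GetObjectRequest.builder()
                .bucket("testbucket").key("probe").build()).asUtf8String();
        assert "hello".equals(read); // the object survives the round trip
    }
}
```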
diff --git a/brute-force-connect/src/test/java/com/bakdata/kafka/BruteForceConverterTest.java b/brute-force-connect/src/test/java/com/bakdata/kafka/BruteForceConverterTest.java
index 9ec37e9..0215dce 100644
--- a/brute-force-connect/src/test/java/com/bakdata/kafka/BruteForceConverterTest.java
+++ b/brute-force-connect/src/test/java/com/bakdata/kafka/BruteForceConverterTest.java
@@ -1,7 +1,7 @@
 /*
  * MIT License
  *
- * Copyright (c) 2022 bakdata
+ * Copyright (c) 2024 bakdata
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -30,7 +30,6 @@
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

-import com.adobe.testing.s3mock.junit5.S3MockExtension;
 import com.bakdata.schemaregistrymock.SchemaRegistryMock;
 import com.google.protobuf.Descriptors.Descriptor;
 import com.google.protobuf.Descriptors.DescriptorValidationException;
@@ -49,6 +48,7 @@
 import io.confluent.kafka.streams.serdes.avro.GenericAvroSerializer;
 import io.confluent.kafka.streams.serdes.json.KafkaJsonSchemaSerde;
 import io.confluent.kafka.streams.serdes.protobuf.KafkaProtobufSerde;
+import java.net.URI;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -76,21 +76,57 @@
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.RegisterExtension;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
-
+import org.testcontainers.containers.localstack.LocalStackContainer;
+import org.testcontainers.containers.localstack.LocalStackContainer.Service;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
+
+@Testcontainers
 class BruteForceConverterTest {
-    @RegisterExtension
-    static final S3MockExtension S3_MOCK = S3MockExtension.builder().silent().withSecureConnection(false).build();
-    private static final String TOPIC = "topic";
-    final SchemaRegistryMock schemaRegistry = new SchemaRegistryMock(List.of(
+
+    private static final DockerImageName LOCAL_STACK_IMAGE = DockerImageName.parse("localstack/localstack")
+            .withTag("1.3.1");
+    @Container
+    private static final LocalStackContainer LOCAL_STACK_CONTAINER = new LocalStackContainer(LOCAL_STACK_IMAGE)
+            .withServices(Service.S3);
+    private final SchemaRegistryMock schemaRegistry = new SchemaRegistryMock(List.of(
             new AvroSchemaProvider(), new JsonSchemaProvider(), new ProtobufSchemaProvider()
     ));

+    static S3Client getS3Client() {
+        return S3Client.builder()
+                .endpointOverride(getEndpointOverride())
+                .credentialsProvider(StaticCredentialsProvider.create(getCredentials()))
+                .region(getRegion())
+                .build();
+    }
+
+    private static Region getRegion() {
+        return Region.of(LOCAL_STACK_CONTAINER.getRegion());
+    }
+
+    private static AwsBasicCredentials getCredentials() {
+        return AwsBasicCredentials.create(
+                LOCAL_STACK_CONTAINER.getAccessKey(), LOCAL_STACK_CONTAINER.getSecretKey()
+        );
+    }
+    private static final String TOPIC = "topic";
+
+    private static URI getEndpointOverride() {
+        return LOCAL_STACK_CONTAINER.getEndpointOverride(Service.S3);
+    }
+
     static Stream<Arguments> generateGenericAvroSerializers() {
         return generateSerializers(new GenericAvroSerde());
     }
@@ -125,7 +161,7 @@ private static DynamicMessage generateDynamicMessage() throws DescriptorValidationException {
         final DynamicSchema dynamicSchema = DynamicSchema.newBuilder()
                 .setName("file")
                 .addMessageDefinition(MessageDefinition.newBuilder("Test")
-                        .addField("", "string", "testId", 1, null, null, null)
+                        .addField(null, "string", "testId", 1, null, null)
                         .build())
                 .build();
         final Descriptor test = dynamicSchema.getMessageDescriptor("Test");
@@ -154,12 +190,12 @@ private static SerializerFactory createLargeMessageSerializer(final Serde<T> serde,
     }

     private static Map<String, Object> getS3EndpointConfig() {
+        final AwsBasicCredentials credentials = getCredentials();
         return Map.of(
-                AbstractLargeMessageConfig.S3_ENDPOINT_CONFIG, "http://localhost:" + S3_MOCK.getHttpPort(),
-                AbstractLargeMessageConfig.S3_REGION_CONFIG, "us-east-1",
-                AbstractLargeMessageConfig.S3_ACCESS_KEY_CONFIG, "foo",
-                AbstractLargeMessageConfig.S3_SECRET_KEY_CONFIG, "bar",
-                AbstractLargeMessageConfig.S3_ENABLE_PATH_STYLE_ACCESS_CONFIG, true
+                AbstractLargeMessageConfig.S3_ENDPOINT_CONFIG, getEndpointOverride().toString(),
+                AbstractLargeMessageConfig.S3_REGION_CONFIG, getRegion().id(),
+                AbstractLargeMessageConfig.S3_ACCESS_KEY_CONFIG, credentials.accessKeyId(),
+                AbstractLargeMessageConfig.S3_SECRET_KEY_CONFIG, credentials.secretAccessKey()
         );
     }

@@ -287,14 +323,14 @@ void shouldConvertByteKeys(final SerializerFactory factory) {
     }

     @ParameterizedTest
-    @MethodSource("generateProtobufSerializers")
-    void shouldConvertJsonKeys(final SerializerFactory factory) throws DescriptorValidationException {
-        final DynamicMessage value = generateDynamicMessage();
+    @MethodSource("generateJsonSerializers")
+    void shouldConvertJsonKeys(final SerializerFactory factory) {
+        final JsonTestRecord value = new JsonTestRecord("test");
         final Map<String, Object> config = Map.of(
                 BruteForceConverterConfig.CONVERTER_CONFIG,
-                List.of(AvroConverter.class.getName(), ProtobufConverter.class.getName())
+                List.of(AvroConverter.class.getName(), JsonSchemaConverter.class.getName())
         );
-        this.testValueConversion(factory, new KafkaProtobufSerializer<>(), value, config, new ProtobufConverter());
+        this.testKeyConversion(factory, new KafkaJsonSchemaSerializer<>(), value, config, new JsonSchemaConverter());
     }

@@ -359,7 +395,9 @@ private void testConversion(final SerializerFactory factory, final Serializer<T> serializer,
             final T value, final Map<String, Object> originals, final Converter expectedConverter, final boolean isKey) {
         final String bucket = "bucket";
-        S3_MOCK.createS3Client().createBucket(bucket);
+        getS3Client().createBucket(CreateBucketRequest.builder()
+                .bucket(bucket)
+                .build());
         final Map<String, Object> config = new HashMap<>(originals);
         config.put(SCHEMA_REGISTRY_URL_CONFIG, this.schemaRegistry.getUrl());
         config.put(AbstractLargeMessageConfig.BASE_PATH_CONFIG, "s3://" + bucket + "/");
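The LocalStack fixture (image constant, container field, `getS3Client()`, `getRegion()`, `getCredentials()`, `getEndpointOverride()`) is now copied verbatim into three test classes. A possible follow-up, sketched under the assumption that Testcontainers also discovers static `@Container` fields declared in superclasses, would be to hoist it into a shared base class:

```java
// Hypothetical refactor, not part of this PR: a single shared LocalStack fixture.
import java.net.URI;
import org.testcontainers.containers.localstack.LocalStackContainer;
import org.testcontainers.containers.localstack.LocalStackContainer.Service;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.utility.DockerImageName;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;

@Testcontainers
abstract class LocalStackS3Test {

    private static final DockerImageName LOCAL_STACK_IMAGE = DockerImageName.parse("localstack/localstack")
            .withTag("1.3.1");
    @Container // inherited static container, started once and shared by subclasses
    private static final LocalStackContainer LOCAL_STACK_CONTAINER = new LocalStackContainer(LOCAL_STACK_IMAGE)
            .withServices(Service.S3);

    static S3Client getS3Client() {
        return S3Client.builder()
                .endpointOverride(getEndpointOverride())
                .credentialsProvider(StaticCredentialsProvider.create(getCredentials()))
                .region(getRegion())
                .build();
    }

    static URI getEndpointOverride() {
        return LOCAL_STACK_CONTAINER.getEndpointOverride(Service.S3);
    }

    static Region getRegion() {
        return Region.of(LOCAL_STACK_CONTAINER.getRegion());
    }

    static AwsBasicCredentials getCredentials() {
        return AwsBasicCredentials.create(
                LOCAL_STACK_CONTAINER.getAccessKey(), LOCAL_STACK_CONTAINER.getSecretKey());
    }
}
```

Each test class would then extend this and keep only its Kafka- and schema-registry-specific setup.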
diff --git a/brute-force-serde/build.gradle.kts b/brute-force-serde/build.gradle.kts
index 8c4cee5..1976cbf 100644
--- a/brute-force-serde/build.gradle.kts
+++ b/brute-force-serde/build.gradle.kts
@@ -4,7 +4,7 @@ import com.google.protobuf.gradle.protoc
 description = "Kafka SerDe that deserializes messages of an unknown serialization format"

 plugins {
-    id("com.github.davidmc24.gradle.plugin.avro") version "1.2.1"
+    id("com.github.davidmc24.gradle.plugin.avro") version "1.9.1"
     id("com.google.protobuf") version "0.8.18"
 }

@@ -26,13 +26,12 @@ dependencies {
     testImplementation(group = "io.confluent", name = "kafka-streams-protobuf-serde", version = confluentVersion)
     testImplementation(group = "io.confluent", name = "kafka-streams-json-schema-serde", version = confluentVersion)

-    testImplementation(group = "com.adobe.testing", name = "s3mock-junit5", version = "2.1.8") {
-        exclude(group = "ch.qos.logback")
-        exclude(group = "org.apache.logging.log4j", module = "log4j-to-slf4j")
-    }
+    val testContainersVersion: String by project
+    testImplementation(group = "org.testcontainers", name = "junit-jupiter", version = testContainersVersion)
+    testImplementation(group = "org.testcontainers", name = "localstack", version = testContainersVersion)
     testImplementation(group = "org.jooq", name = "jool", version = "0.9.14")
-    val fluentKafkaVersion = "2.5.1"
+    val fluentKafkaVersion = "2.11.3"
     testImplementation(
         group = "com.bakdata.fluent-kafka-streams-tests",
         name = "fluent-kafka-streams-tests-junit5",
diff --git a/brute-force-serde/src/test/java/com/bakdata/kafka/BruteForceDeserializerTest.java b/brute-force-serde/src/test/java/com/bakdata/kafka/BruteForceDeserializerTest.java
index e75f3d1..8b1517f 100644
--- a/brute-force-serde/src/test/java/com/bakdata/kafka/BruteForceDeserializerTest.java
+++ b/brute-force-serde/src/test/java/com/bakdata/kafka/BruteForceDeserializerTest.java
@@ -1,7 +1,7 @@
 /*
  * MIT License
  *
- * Copyright (c) 2022 bakdata
+ * Copyright (c) 2024 bakdata
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -28,7 +28,6 @@
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

-import com.adobe.testing.s3mock.junit5.S3MockExtension;
 import com.bakdata.Id;
 import com.bakdata.fluent_kafka_streams_tests.TestTopology;
 import com.bakdata.kafka.Test.ProtobufRecord;
@@ -41,6 +40,7 @@
 import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
 import io.confluent.kafka.streams.serdes.json.KafkaJsonSchemaSerde;
 import io.confluent.kafka.streams.serdes.protobuf.KafkaProtobufSerde;
+import java.net.URI;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -71,15 +71,50 @@
 import org.jooq.lambda.Seq;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.RegisterExtension;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
-
+import org.testcontainers.containers.localstack.LocalStackContainer;
+import org.testcontainers.containers.localstack.LocalStackContainer.Service;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
+
+@Testcontainers
 class BruteForceDeserializerTest {
-    @RegisterExtension
-    static final S3MockExtension S3_MOCK = S3MockExtension.builder().silent()
-            .withSecureConnection(false).build();
+
+    private static final DockerImageName LOCAL_STACK_IMAGE = DockerImageName.parse("localstack/localstack")
+            .withTag("1.3.1");
+    @Container
+    private static final LocalStackContainer LOCAL_STACK_CONTAINER = new LocalStackContainer(LOCAL_STACK_IMAGE)
+            .withServices(Service.S3);
+
+    static S3Client getS3Client() {
+        return S3Client.builder()
+                .endpointOverride(getEndpointOverride())
+                .credentialsProvider(StaticCredentialsProvider.create(getCredentials()))
+                .region(getRegion())
+                .build();
+    }
+
+    private static Region getRegion() {
+        return Region.of(LOCAL_STACK_CONTAINER.getRegion());
+    }
+
+    private static AwsBasicCredentials getCredentials() {
+        return AwsBasicCredentials.create(
+                LOCAL_STACK_CONTAINER.getAccessKey(), LOCAL_STACK_CONTAINER.getSecretKey()
+        );
+    }
+
+    private static URI getEndpointOverride() {
+        return LOCAL_STACK_CONTAINER.getEndpointOverride(Service.S3);
+    }
     private static final String INPUT_TOPIC = "input";
     private static final String OUTPUT_TOPIC = "output";

@@ -177,12 +212,12 @@ private static SerdeFactory createLargeMessageSerde(final Serde<T> inner,
     }

     private static Map<String, Object> getS3EndpointConfig() {
+        final AwsBasicCredentials credentials = getCredentials();
         return Map.of(
-                AbstractLargeMessageConfig.S3_ENDPOINT_CONFIG, "http://localhost:" + S3_MOCK.getHttpPort(),
-                AbstractLargeMessageConfig.S3_REGION_CONFIG, "us-east-1",
-                AbstractLargeMessageConfig.S3_ACCESS_KEY_CONFIG, "foo",
-                AbstractLargeMessageConfig.S3_SECRET_KEY_CONFIG, "bar",
-                AbstractLargeMessageConfig.S3_ENABLE_PATH_STYLE_ACCESS_CONFIG, true
+                AbstractLargeMessageConfig.S3_ENDPOINT_CONFIG, getEndpointOverride().toString(),
+                AbstractLargeMessageConfig.S3_REGION_CONFIG, getRegion().id(),
+                AbstractLargeMessageConfig.S3_ACCESS_KEY_CONFIG, credentials.accessKeyId(),
+                AbstractLargeMessageConfig.S3_SECRET_KEY_CONFIG, credentials.secretAccessKey()
        );
     }

@@ -329,7 +364,7 @@ void shouldReadGenericAvroKeys(final SerdeFactory factory) {
     void shouldReadBytesValues(final SerdeFactory factory) {
         final Properties properties = new Properties();
         // this makes StringDeserializer fail
-        properties.put("value.deserializer.encoding", "missing");
+        properties.setProperty("value.deserializer.encoding", "missing");
         final byte[] value = {1, 0};

         this.testValueTopology(factory, properties, Serdes.ByteArray(), value);
@@ -340,7 +375,7 @@ void shouldReadBytesValues(final SerdeFactory factory) {
     void shouldReadBytesKeys(final SerdeFactory factory) {
         final Properties properties = new Properties();
         // this makes StringDeserializer fail
-        properties.put("key.deserializer.encoding", "missing");
+        properties.setProperty("key.deserializer.encoding", "missing");
         final byte[] value = {1, 0};

         this.testKeyTopology(factory, properties, Serdes.ByteArray(), value);
@@ -389,7 +424,9 @@ void shouldReadJsonKeys(final SerdeFactory factory) {
     private <T> void testValueTopology(final SerdeFactory<T> factory, final Properties properties,
             final Serde<T> serde, final T value) {
         final String bucket = "bucket";
-        S3_MOCK.createS3Client().createBucket(bucket);
+        getS3Client().createBucket(CreateBucketRequest.builder()
+                .bucket(bucket)
+                .build());
         this.createTopology(p -> createValueTopology(p, serde.getClass()), properties);

         final Map<String, Object> config = Map.of(
@@ -416,7 +453,9 @@ private void testValueTopology(final SerdeFactory factory, final Properti
     private <T> void testKeyTopology(final SerdeFactory<T> factory, final Properties properties,
             final Serde<T> serde, final T value) {
         final String bucket = "bucket";
-        S3_MOCK.createS3Client().createBucket(bucket);
+        getS3Client().createBucket(CreateBucketRequest.builder()
+                .bucket(bucket)
+                .build());
         this.createTopology(p -> createKeyTopology(p, serde.getClass()), properties);

         final Map<String, Object> config = Map.of(
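The `put(...)` → `setProperty(...)` changes across these tests are not just cosmetic. `Properties` extends `Hashtable<Object, Object>`, so `put` accepts non-String values that `getProperty` then silently ignores, while `setProperty` enforces String keys and values at compile time. That is also why entries holding `Class` objects (e.g. `KEY_SERIALIZER_CLASS_CONFIG`) keep using `put`. A small demonstration:

```java
// Why setProperty is preferred for String-valued entries.
import java.util.Properties;

class PropertiesPitfall {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        props.put("port", 9092);                        // stored as Integer
        System.out.println(props.getProperty("port"));  // null: value is not a String
        props.setProperty("port", "9092");              // String-only, checked at compile time
        System.out.println(props.getProperty("port"));  // "9092"
    }
}
```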
version "3.0.2" + id("com.bakdata.sonar") version "1.1.9" + id("com.bakdata.sonatype") version "1.1.9" + id("org.hildan.github.changelog") version "1.12.1" + id("io.freefair.lombok") version "6.6.3" } allprojects { @@ -47,7 +44,7 @@ subprojects { apply(plugin = "java-library") apply(plugin = "io.freefair.lombok") - configure { + configure { sourceCompatibility = JavaVersion.VERSION_11 targetCompatibility = JavaVersion.VERSION_11 } @@ -60,17 +57,9 @@ subprojects { "testImplementation"(group = "org.junit.jupiter", name = "junit-jupiter-api", version = junitVersion) "testImplementation"(group = "org.junit.jupiter", name = "junit-jupiter-params", version = junitVersion) "testRuntimeOnly"(group = "org.junit.jupiter", name = "junit-jupiter-engine", version = junitVersion) - "testImplementation"(group = "org.assertj", name = "assertj-core", version = "3.20.2") + "testImplementation"(group = "org.assertj", name = "assertj-core", version = "3.25.1") val log4jVersion: String by project - "testImplementation"(group = "org.apache.logging.log4j", name = "log4j-slf4j-impl", version = log4jVersion) - } -} - -fun ReleaseExtension.git(configure: GitConfig.() -> Unit) = (getProperty("git") as GitConfig).configure() - -release { - git { - requireBranch = "main" + "testImplementation"(group = "org.apache.logging.log4j", name = "log4j-slf4j2-impl", version = log4jVersion) } } diff --git a/gradle.properties b/gradle.properties index 16d3fe6..8ba5706 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,9 +1,10 @@ version=1.2.1-SNAPSHOT org.gradle.caching=true org.gradle.parallel=true -kafkaVersion=3.0.0 -confluentVersion=7.0.1 -junitVersion=5.7.2 -slf4jVersion=1.7.32 -largeMessageVersion=2.3.0 -log4jVersion=2.17.2 +kafkaVersion=3.5.2 +confluentVersion=7.5.1 +junitVersion=5.10.1 +slf4jVersion=2.0.10 +largeMessageVersion=2.6.0 +log4jVersion=2.22.1 +testContainersVersion=1.19.3 diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 5c2d1cf..e708b1c 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 1f3fdbc..f42e62f 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.7.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index b0d6d0a..4f906e0 100755 --- a/gradlew +++ b/gradlew @@ -7,7 +7,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -82,6 +82,7 @@ esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + # Determine the Java command to use to start the JVM. 
if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then @@ -125,10 +126,11 @@ if $darwin; then GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" fi -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then APP_HOME=`cygpath --path --mixed "$APP_HOME"` CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` # We build the pattern for arguments to be converted via cygpath @@ -154,19 +156,19 @@ if $cygwin ; then else eval `echo args$i`="\"$arg\"" fi - i=$((i+1)) + i=`expr $i + 1` done case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; esac fi @@ -175,14 +177,9 @@ save () { for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done echo " " } -APP_ARGS=$(save "$@") +APP_ARGS=`save "$@"` # Collect all arguments for the java command, following the shell quoting and substitution rules eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" -fi - exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat index 9991c50..107acd3 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -5,7 +5,7 @@ @rem you may not use this file except in compliance with the License. @rem You may obtain a copy of the License at @rem -@rem http://www.apache.org/licenses/LICENSE-2.0 +@rem https://www.apache.org/licenses/LICENSE-2.0 @rem @rem Unless required by applicable law or agreed to in writing, software @rem distributed under the License is distributed on an "AS IS" BASIS, @@ -29,6 +29,9 @@ if "%DIRNAME%" == "" set DIRNAME=. set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
 set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"

@@ -37,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome

 set JAVA_EXE=java.exe
 %JAVA_EXE% -version >NUL 2>&1
-if "%ERRORLEVEL%" == "0" goto init
+if "%ERRORLEVEL%" == "0" goto execute

 echo.
 echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@@ -51,7 +54,7 @@ goto fail

 set JAVA_HOME=%JAVA_HOME:"=%
 set JAVA_EXE=%JAVA_HOME%/bin/java.exe

-if exist "%JAVA_EXE%" goto init
+if exist "%JAVA_EXE%" goto execute

 echo.
 echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
@@ -61,28 +64,14 @@ echo location of your Java installation.

 goto fail

-:init
-@rem Get command-line arguments, handling Windows variants
-
-if not "%OS%" == "Windows_NT" goto win9xME_args
-
-:win9xME_args
-@rem Slurp the command line arguments.
-set CMD_LINE_ARGS=
-set _SKIP=2
-
-:win9xME_args_slurp
-if "x%~1" == "x" goto execute
-
-set CMD_LINE_ARGS=%*
-
 :execute
 @rem Setup the command line

 set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

+
 @rem Execute Gradle
-"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*

 :end
 @rem End local scope for the variables with windows NT shell