diff --git a/build.sbt b/build.sbt
index 76aa0a696..c631398b6 100644
--- a/build.sbt
+++ b/build.sbt
@@ -1,5 +1,7 @@
import Dependencies.globalExcludeDeps
import Dependencies.gson
+import Dependencies.bouncyCastle
+
import Settings.*
import sbt.Keys.libraryDependencies
import sbt.*
@@ -18,8 +20,9 @@ lazy val subProjects: Seq[Project] = Seq(
`azure-documentdb`,
`azure-datalake`,
cassandra,
- elastic6,
- elastic7,
+ `elastic-common`,
+ opensearch,
+ elastic8,
ftp,
`gcp-storage`,
http,
@@ -219,18 +222,17 @@ lazy val cassandra = (project in file("kafka-connect-cassandra"))
.configureFunctionalTests()
.enablePlugins(PackPlugin)
-lazy val elastic6 = (project in file("kafka-connect-elastic6"))
+lazy val `elastic-common` = (project in file("kafka-connect-elastic-common"))
.dependsOn(common)
.dependsOn(`sql-common`)
.dependsOn(`test-common` % "fun->compile")
.settings(
settings ++
Seq(
- name := "kafka-connect-elastic6",
+ name := "kafka-connect-elastic-common",
description := "Kafka Connect compatible connectors to move data between Kafka and popular data stores",
- libraryDependencies ++= baseDeps ++ kafkaConnectElastic6Deps,
+ libraryDependencies ++= baseDeps ++ kafkaConnectElasticBaseDeps,
publish / skip := true,
- FunctionalTest / baseDirectory := (LocalRootProject / baseDirectory).value,
packExcludeJars := Seq(
"scala-.*\\.jar",
"zookeeper-.*\\.jar",
@@ -239,20 +241,20 @@ lazy val elastic6 = (project in file("kafka-connect-elastic6"))
)
.configureAssembly(true)
.configureTests(baseTestDeps)
- .configureIntegrationTests(kafkaConnectElastic6TestDeps)
+ .configureIntegrationTests(kafkaConnectElastic8TestDeps)
.configureFunctionalTests()
- .enablePlugins(PackPlugin)
+ .disablePlugins(PackPlugin)
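+// elastic-common is a shared library module: it is not packaged or published on its own; the elastic8 and opensearch connectors depend on it instead.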
-lazy val elastic7 = (project in file("kafka-connect-elastic7"))
+lazy val elastic8 = (project in file("kafka-connect-elastic8"))
.dependsOn(common)
- .dependsOn(`sql-common`)
- .dependsOn(`test-common` % "fun->compile")
+ .dependsOn(`elastic-common`)
+ .dependsOn(`test-common` % "fun->compile;it->compile")
.settings(
settings ++
Seq(
- name := "kafka-connect-elastic7",
+ name := "kafka-connect-elastic8",
description := "Kafka Connect compatible connectors to move data between Kafka and popular data stores",
- libraryDependencies ++= baseDeps ++ kafkaConnectElastic7Deps,
+ libraryDependencies ++= baseDeps ++ kafkaConnectElastic8Deps,
publish / skip := true,
packExcludeJars := Seq(
"scala-.*\\.jar",
@@ -262,10 +264,33 @@ lazy val elastic7 = (project in file("kafka-connect-elastic7"))
)
.configureAssembly(true)
.configureTests(baseTestDeps)
- .configureIntegrationTests(kafkaConnectElastic7TestDeps)
+ .configureIntegrationTests(kafkaConnectElastic8TestDeps)
.configureFunctionalTests()
.enablePlugins(PackPlugin)
+lazy val opensearch = (project in file("kafka-connect-opensearch"))
+ .dependsOn(common)
+ .dependsOn(`elastic-common`)
+ .dependsOn(`test-common` % "fun->compile;it->compile")
+ .settings(
+ settings ++
+ Seq(
+ name := "kafka-connect-opensearch",
+ description := "Kafka Connect compatible connectors to move data between Kafka and popular data stores",
+ libraryDependencies ++= baseDeps ++ kafkaConnectOpenSearchDeps,
+ publish / skip := true,
+ packExcludeJars := Seq(
+ "scala-.*\\.jar",
+ "zookeeper-.*\\.jar",
+ ),
+ ),
+ )
+ .configureAssembly(false)
+ .configureTests(baseTestDeps)
+ //.configureIntegrationTests(kafkaConnectOpenSearchTestDeps)
+ .configureFunctionalTests(bouncyCastle)
+ .enablePlugins(PackPlugin)
+
lazy val http = (project in file("kafka-connect-http"))
.dependsOn(common)
//.dependsOn(`test-common` % "fun->compile")
diff --git a/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkConnector.scala b/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkConnector.scala
index 8acbbb14a..89eb08629 100644
--- a/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkConnector.scala
+++ b/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package io.lenses.streamreactor.connect.azure.documentdb.sink
+import cats.implicits.toBifunctorOps
import io.lenses.streamreactor.common.config.Helpers
import io.lenses.streamreactor.common.utils.JarManifest
import io.lenses.streamreactor.connect.azure.documentdb.DocumentClientProvider
@@ -100,7 +101,7 @@ class DocumentDbSinkConnector private[sink] (builder: DocumentDbSinkSettings =>
configProps = props
//check input topics
- Helpers.checkInputTopics(DocumentDbConfigConstants.KCQL_CONFIG, props.asScala.toMap)
+ Helpers.checkInputTopics(DocumentDbConfigConstants.KCQL_CONFIG, props.asScala.toMap).leftMap(throw _)
val settings = DocumentDbSinkSettings(config)
diff --git a/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/CassandraConnection.scala b/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/CassandraConnection.scala
index 8bffaf6da..1561c1da2 100644
--- a/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/CassandraConnection.scala
+++ b/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/CassandraConnection.scala
@@ -15,9 +15,9 @@
*/
package io.lenses.streamreactor.connect.cassandra
-import io.lenses.streamreactor.common.config.SSLConfig
-import io.lenses.streamreactor.common.config.SSLConfigContext
import io.lenses.streamreactor.connect.cassandra.config.CassandraConfigConstants
+import io.lenses.streamreactor.connect.cassandra.config.SSLConfig
+import io.lenses.streamreactor.connect.cassandra.config.SSLConfigContext
import io.lenses.streamreactor.connect.cassandra.config.LoadBalancingPolicy
import com.datastax.driver.core.Cluster.Builder
import com.datastax.driver.core.policies.DCAwareRoundRobinPolicy
diff --git a/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/config/SSLConfigContext.scala b/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/config/SSLConfigContext.scala
new file mode 100644
index 000000000..9fbc8b10c
--- /dev/null
+++ b/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/config/SSLConfigContext.scala
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.cassandra.config
+
+import java.io.FileInputStream
+import java.security.KeyStore
+import java.security.SecureRandom
+import javax.net.ssl._
+
+/**
+ * Created by andrew@datamountaineer.com on 14/04/16.
+ * stream-reactor
+ */
+object SSLConfigContext {
+ def apply(config: SSLConfig): SSLContext =
+ getSSLContext(config)
+
+ /**
+ * Get an SSLContext for a given set of credentials
+ *
+ * @param config An SSLConfig containing key and truststore credentials
+ * @return an SSLContext
+ */
+ def getSSLContext(config: SSLConfig): SSLContext = {
+ val useClientCertAuth = config.useClientCert
+
+ //is client certificate authentication enabled
+ val keyManagers: Array[KeyManager] = if (useClientCertAuth) {
+ getKeyManagers(config)
+ } else {
+ Array[KeyManager]()
+ }
+
+ val ctx: SSLContext = SSLContext.getInstance("SSL")
+ val trustManagers = getTrustManagers(config)
+ ctx.init(keyManagers, trustManagers, new SecureRandom())
+ ctx
+ }
+
+ /**
+ * Get an array of Trust Managers
+ *
+ * @param config An SSLConfig containing key and truststore credentials
+ * @return An Array of TrustManagers
+ */
+ def getTrustManagers(config: SSLConfig): Array[TrustManager] = {
+ val tsf = new FileInputStream(config.trustStorePath)
+ val ts = KeyStore.getInstance(config.trustStoreType)
+ ts.load(tsf, config.trustStorePass.toCharArray)
+ val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
+ tmf.init(ts)
+ tmf.getTrustManagers
+ }
+
+ /**
+ * Get an array of Key Managers
+ *
+ * @param config An SSLConfig containing key and truststore credentials
+ * @return An Array of KeyManagers
+ */
+ def getKeyManagers(config: SSLConfig): Array[KeyManager] = {
+ require(config.keyStorePath.nonEmpty, "Key store path is not set!")
+ require(config.keyStorePass.nonEmpty, "Key store password is not set!")
+ val ksf = new FileInputStream(config.keyStorePath.get)
+ val ks = KeyStore.getInstance(config.keyStoreType)
+ ks.load(ksf, config.keyStorePass.get.toCharArray)
+ val kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
+ kmf.init(ks, config.keyStorePass.get.toCharArray)
+ kmf.getKeyManagers
+ }
+
+}
+
+/**
+ * Class for holding key and truststore settings
+ */
+case class SSLConfig(
+ trustStorePath: String,
+ trustStorePass: String,
+ keyStorePath: Option[String],
+ keyStorePass: Option[String],
+ useClientCert: Boolean = false,
+ keyStoreType: String = "JKS",
+ trustStoreType: String = "JKS",
+)
diff --git a/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/sink/CassandraSinkConnector.scala b/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/sink/CassandraSinkConnector.scala
index a6a264e54..0f3472af3 100644
--- a/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/sink/CassandraSinkConnector.scala
+++ b/kafka-connect-cassandra/src/main/scala/io/lenses/streamreactor/connect/cassandra/sink/CassandraSinkConnector.scala
@@ -15,18 +15,18 @@
*/
package io.lenses.streamreactor.connect.cassandra.sink
+import cats.implicits.toBifunctorOps
+import com.typesafe.scalalogging.StrictLogging
import io.lenses.streamreactor.common.config.Helpers
import io.lenses.streamreactor.common.utils.JarManifest
-
-import java.util
import io.lenses.streamreactor.connect.cassandra.config.CassandraConfigConstants
import io.lenses.streamreactor.connect.cassandra.config.CassandraConfigSink
-import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.connect.connector.Task
import org.apache.kafka.connect.errors.ConnectException
import org.apache.kafka.connect.sink.SinkConnector
+import java.util
import scala.jdk.CollectionConverters.MapHasAsScala
import scala.jdk.CollectionConverters.SeqHasAsJava
import scala.util.Failure
@@ -66,7 +66,7 @@ class CassandraSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
//check input topics
- Helpers.checkInputTopics(CassandraConfigConstants.KCQL, props.asScala.toMap)
+ Helpers.checkInputTopics(CassandraConfigConstants.KCQL, props.asScala.toMap).leftMap(throw _)
configProps = props
Try(new CassandraConfigSink(props.asScala.toMap)) match {
case Failure(f) =>
diff --git a/kafka-connect-cassandra/src/test/scala/io/lenses/streamreactor/connect/cassandra/config/TestSSLConfigContext.scala b/kafka-connect-cassandra/src/test/scala/io/lenses/streamreactor/connect/cassandra/config/TestSSLConfigContext.scala
new file mode 100644
index 000000000..e0bf14d62
--- /dev/null
+++ b/kafka-connect-cassandra/src/test/scala/io/lenses/streamreactor/connect/cassandra/config/TestSSLConfigContext.scala
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.cassandra.config
+
+import org.scalatest.BeforeAndAfter
+import org.scalatest.matchers.should.Matchers
+import org.scalatest.wordspec.AnyWordSpec
+
+import javax.net.ssl.KeyManager
+import javax.net.ssl.SSLContext
+import javax.net.ssl.TrustManager
+
+/**
+ * Created by andrew@datamountaineer.com on 19/04/16.
+ * stream-reactor
+ */
+class TestSSLConfigContext extends AnyWordSpec with Matchers with BeforeAndAfter {
+ var sslConfig: SSLConfig = null
+ var sslConfigNoClient: SSLConfig = null
+
+ before {
+ val trustStorePath = getClass.getResource("/stc_truststore.jks").getPath
+ val keystorePath = getClass.getResource("/stc_keystore.jks").getPath
+ val trustStorePassword = "erZHDS9Eo0CcNo"
+ val keystorePassword = "8yJQLUnGkwZxOw"
+ sslConfig = SSLConfig(trustStorePath, trustStorePassword, Some(keystorePath), Some(keystorePassword), true)
+ sslConfigNoClient = SSLConfig(trustStorePath, trustStorePassword, Some(keystorePath), Some(keystorePassword), false)
+ }
+
+ "SSLConfigContext" should {
+ "should return an Array of KeyManagers" in {
+ val keyManagers = SSLConfigContext.getKeyManagers(sslConfig)
+ keyManagers.length shouldBe 1
+ val entry = keyManagers.head
+ entry shouldBe a[KeyManager]
+ }
+
+ "should return an Array of TrustManagers" in {
+ val trustManager = SSLConfigContext.getTrustManagers(sslConfig)
+ trustManager.length shouldBe 1
+ val entry = trustManager.head
+ entry shouldBe a[TrustManager]
+ }
+
+ "should return a SSLContext" in {
+ val context = SSLConfigContext(sslConfig)
+ context.getProtocol shouldBe "SSL"
+ context shouldBe a[SSLContext]
+ }
+ }
+}
diff --git a/kafka-connect-common/src/main/scala/io/lenses/streamreactor/common/config/Helpers.scala b/kafka-connect-common/src/main/scala/io/lenses/streamreactor/common/config/Helpers.scala
index 3624a7d5b..f4e444870 100644
--- a/kafka-connect-common/src/main/scala/io/lenses/streamreactor/common/config/Helpers.scala
+++ b/kafka-connect-common/src/main/scala/io/lenses/streamreactor/common/config/Helpers.scala
@@ -15,6 +15,7 @@
*/
package io.lenses.streamreactor.common.config
+import cats.implicits.catsSyntaxEitherId
import io.lenses.kcql.Kcql
import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.common.config.ConfigException
@@ -26,11 +27,11 @@ import org.apache.kafka.common.config.ConfigException
object Helpers extends StrictLogging {
- def checkInputTopics(kcqlConstant: String, props: Map[String, String]): Boolean = {
+ def checkInputTopics(kcqlConstant: String, props: Map[String, String]): Either[Throwable, Unit] = {
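+ // Validates that the mandatory `topics` setting and the KCQL statements cover the same topic set; mismatches are reported as Left(ConfigException) rather than thrown.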
val topics = props("topics").split(",").map(t => t.trim).toSet
val raw = props(kcqlConstant)
if (raw.isEmpty) {
- throw new ConfigException(s"Missing $kcqlConstant")
+ return new ConfigException(s"Missing $kcqlConstant").asLeft
}
val kcql = raw.split(";").map(r => Kcql.parse(r)).toSet
val sources = kcql.map(k => k.getSource)
@@ -38,20 +39,20 @@ object Helpers extends StrictLogging {
if (!res) {
val missing = topics.diff(sources)
- throw new ConfigException(
+ return new ConfigException(
s"Mandatory `topics` configuration contains topics not set in $kcqlConstant: ${missing}, kcql contains $sources",
- )
+ ).asLeft
}
val res1 = sources.subsetOf(topics)
if (!res1) {
val missing = topics.diff(sources)
- throw new ConfigException(
+ return new ConfigException(
s"$kcqlConstant configuration contains topics not set in mandatory `topic` configuration: ${missing}, kcql contains $sources",
- )
+ ).asLeft
}
- true
+ ().asRight
}
}
diff --git a/kafka-connect-common/src/main/scala/io/lenses/streamreactor/connect/security/StoreInfo.scala b/kafka-connect-common/src/main/scala/io/lenses/streamreactor/connect/security/StoreInfo.scala
new file mode 100644
index 000000000..38bf3ceb4
--- /dev/null
+++ b/kafka-connect-common/src/main/scala/io/lenses/streamreactor/connect/security/StoreInfo.scala
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.security
+
+import cats.implicits.catsSyntaxOptionId
+import cats.implicits.none
+import io.lenses.streamreactor.common.config.base.traits.BaseConfig
+import org.apache.kafka.common.config.SslConfigs
+
+import java.io.FileInputStream
+import java.security.KeyStore
+import javax.net.ssl.KeyManagerFactory
+import javax.net.ssl.SSLContext
+import javax.net.ssl.TrustManagerFactory
+
+case class StoreInfo(
+ storePath: String,
+ storeType: Option[String],
+ storePassword: Option[String] = None,
+)
+
+case class StoresInfo(
+ trustStore: Option[StoreInfo] = None,
+ keyStore: Option[StoreInfo] = None,
+) {
+ def toSslContext: Option[SSLContext] = {
+ val maybeTrustFactory: Option[TrustManagerFactory] = trustStore.map {
+ case StoreInfo(path, storeType, password) =>
+ trustManagers(path, storeType, password)
+ }
+ val maybeKeyFactory: Option[KeyManagerFactory] = keyStore.map {
+ case StoreInfo(path, storeType, password) =>
+ keyManagers(path, storeType, password)
+ }
+
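+ // Only build an SSLContext when at least one store is configured; otherwise return None so the caller can fall back to its default behaviour.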
+ if (maybeTrustFactory.nonEmpty || maybeKeyFactory.nonEmpty) {
+ val sslContext = SSLContext.getInstance("TLS")
+ sslContext.init(
+ maybeKeyFactory.map(_.getKeyManagers).orNull,
+ maybeTrustFactory.map(_.getTrustManagers).orNull,
+ null,
+ )
+ sslContext.some
+ } else {
+ none
+ }
+ }
+
+ private def trustManagers(path: String, storeType: Option[String], password: Option[String]) = {
+ val truststore = KeyStore.getInstance(storeType.map(_.toUpperCase).getOrElse("JKS"))
+ val truststoreStream = new FileInputStream(path)
+ truststore.load(truststoreStream, password.getOrElse("").toCharArray)
+
+ val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
+ trustManagerFactory.init(truststore)
+ trustManagerFactory
+ }
+
+ private def keyManagers(path: String, storeType: Option[String], password: Option[String]): KeyManagerFactory = {
+ val keyStore = KeyStore.getInstance(storeType.map(_.toUpperCase).getOrElse("JKS"))
+ val truststoreStream = new FileInputStream(path)
+ keyStore.load(truststoreStream, password.getOrElse("").toCharArray)
+
+ val keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
+ keyManagerFactory.init(keyStore, password.getOrElse("").toCharArray)
+ keyManagerFactory
+ }
+}
+
+object StoresInfo {
+ def apply(config: BaseConfig): StoresInfo = {
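+ // A store is only picked up when its location property is set; the store type and password remain optional.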
+ val trustStore = for {
+ storePath <- Option(config.getString(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG))
+ storeType = Option(config.getString(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG))
+ storePassword = Option(config.getPassword(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)).map(_.value())
+ } yield StoreInfo(storePath, storeType, storePassword)
+ val keyStore = for {
+ storePath <- Option(config.getString(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG))
+ storeType = Option(config.getString(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG))
+ storePassword = Option(config.getPassword(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG)).map(_.value())
+ } yield StoreInfo(storePath, storeType, storePassword)
+
+ StoresInfo(trustStore, keyStore)
+ }
+}
diff --git a/kafka-connect-common/src/test/scala/io/lenses/streamreactor/common/config/TestHelpers.scala b/kafka-connect-common/src/test/scala/io/lenses/streamreactor/common/config/TestHelpers.scala
index 4ed4ce4be..7f4504dd7 100644
--- a/kafka-connect-common/src/test/scala/io/lenses/streamreactor/common/config/TestHelpers.scala
+++ b/kafka-connect-common/src/test/scala/io/lenses/streamreactor/common/config/TestHelpers.scala
@@ -17,12 +17,14 @@ package io.lenses.streamreactor.common.config
import io.lenses.streamreactor.common.TestUtilsBase
import org.apache.kafka.common.config.ConfigException
+import org.scalatest.EitherValues
+import org.scalatest.matchers.should.Matchers
/**
* Created by andrew@datamountaineer.com on 23/08/2017.
* kafka-connect-common
*/
-class TestHelpers extends TestUtilsBase {
+class TestHelpers extends TestUtilsBase with EitherValues with Matchers {
val kcqlConstant: String = "myconnector.kcql"
@@ -31,18 +33,15 @@ class TestHelpers extends TestUtilsBase {
s"$kcqlConstant" -> "insert into table select * from t1;insert into table2 select * from t2",
)
- intercept[ConfigException] {
- Helpers.checkInputTopics(kcqlConstant, props)
- }
+ Helpers.checkInputTopics(kcqlConstant, props).left.value should be(a[ConfigException])
}
"should throw exception if topics not specified in kcql" in {
val props = Map("topics" -> "t1,t2", s"$kcqlConstant" -> "insert into table select * from t1")
- intercept[ConfigException] {
- Helpers.checkInputTopics(kcqlConstant, props)
- }
+ Helpers.checkInputTopics(kcqlConstant, props).left.value should be(a[ConfigException])
+
}
"should not throw exception if all good" in {
@@ -50,8 +49,7 @@ class TestHelpers extends TestUtilsBase {
s"$kcqlConstant" -> "insert into table select * from t1;insert into table2 select * from t2",
)
- val res = Helpers.checkInputTopics(kcqlConstant, props)
- res shouldBe true
+ Helpers.checkInputTopics(kcqlConstant, props).value should be(())
}
"should add topics involved in kcql error to message" in {
@@ -59,10 +57,8 @@ class TestHelpers extends TestUtilsBase {
s"$kcqlConstant" -> "insert into table select time,c1,c2 from topic1 WITH TIMESTAMP time",
)
- val e = intercept[ConfigException] {
- Helpers.checkInputTopics(kcqlConstant, props)
- }
-
+ val e = Helpers.checkInputTopics(kcqlConstant, props).left.value
+ e should be(a[ConfigException])
e.getMessage.contains("topic1WITHTIMESTAMPtime") shouldBe true
}
}
diff --git a/kafka-connect-common/src/test/scala/io/lenses/streamreactor/common/config/TestSSLConfigContext.scala b/kafka-connect-common/src/test/scala/io/lenses/streamreactor/common/config/TestSSLConfigContext.scala
index 0b3baea4f..861a5e2ce 100644
--- a/kafka-connect-common/src/test/scala/io/lenses/streamreactor/common/config/TestSSLConfigContext.scala
+++ b/kafka-connect-common/src/test/scala/io/lenses/streamreactor/common/config/TestSSLConfigContext.scala
@@ -13,52 +13,3 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.common.config
-
-import org.scalatest.BeforeAndAfter
-import org.scalatest.matchers.should.Matchers
-import org.scalatest.wordspec.AnyWordSpec
-
-import javax.net.ssl.KeyManager
-import javax.net.ssl.SSLContext
-import javax.net.ssl.TrustManager
-
-/**
- * Created by andrew@datamountaineer.com on 19/04/16.
- * stream-reactor
- */
-class TestSSLConfigContext extends AnyWordSpec with Matchers with BeforeAndAfter {
- var sslConfig: SSLConfig = null
- var sslConfigNoClient: SSLConfig = null
-
- before {
- val trustStorePath = getClass.getResource("/stc_truststore.jks").getPath
- val keystorePath = getClass.getResource("/stc_keystore.jks").getPath
- val trustStorePassword = "erZHDS9Eo0CcNo"
- val keystorePassword = "8yJQLUnGkwZxOw"
- sslConfig = SSLConfig(trustStorePath, trustStorePassword, Some(keystorePath), Some(keystorePassword), true)
- sslConfigNoClient = SSLConfig(trustStorePath, trustStorePassword, Some(keystorePath), Some(keystorePassword), false)
- }
-
- "SSLConfigContext" should {
- "should return an Array of KeyManagers" in {
- val keyManagers = SSLConfigContext.getKeyManagers(sslConfig)
- keyManagers.length shouldBe 1
- val entry = keyManagers.head
- entry shouldBe a[KeyManager]
- }
-
- "should return an Array of TrustManagers" in {
- val trustManager = SSLConfigContext.getTrustManagers(sslConfig)
- trustManager.length shouldBe 1
- val entry = trustManager.head
- entry shouldBe a[TrustManager]
- }
-
- "should return a SSLContext" in {
- val context = SSLConfigContext(sslConfig)
- context.getProtocol shouldBe "SSL"
- context shouldBe a[SSLContext]
- }
- }
-}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkConnector.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkConnector.scala
similarity index 72%
rename from kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkConnector.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkConnector.scala
index 6a5a0f64b..d7352c4df 100644
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkConnector.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkConnector.scala
@@ -13,31 +13,34 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7
+package io.lenses.streamreactor.connect.elastic.common
+import cats.implicits.toBifunctorOps
import io.lenses.streamreactor.common.config.Helpers
import io.lenses.streamreactor.common.utils.JarManifest
-
-import java.util
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfigConstants
import com.typesafe.scalalogging.StrictLogging
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfigDef
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettings
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.connect.connector.Task
import org.apache.kafka.connect.sink.SinkConnector
+import java.util
import scala.jdk.CollectionConverters.MapHasAsScala
import scala.jdk.CollectionConverters.SeqHasAsJava
-class ElasticSinkConnector extends SinkConnector with StrictLogging {
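+/**
+ * Base sink connector shared by the concrete Elasticsearch and OpenSearch connectors.
+ *
+ * @tparam C  the settings type produced from the connector configuration
+ * @tparam CO the config definition providing the KCQL key and the ConfigDef
+ * @tparam T  the concrete ElasticSinkTask class handed to the Connect framework
+ */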
+abstract class ElasticSinkConnector[C <: ElasticSettings, CO <: ElasticConfigDef, T <: ElasticSinkTask[C, CO]](
+ sinkClass: Class[T],
+ constants: CO,
+) extends SinkConnector
+ with StrictLogging {
private var configProps: Option[util.Map[String, String]] = None
- private val configDef = ElasticConfig.config
- private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
+ private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
/**
* States which SinkTask class to use
*/
- override def taskClass(): Class[_ <: Task] = classOf[ElasticSinkTask]
+ override def taskClass(): Class[_ <: Task] = sinkClass
/**
* Set the configuration for each work and determine the split
@@ -57,11 +60,11 @@ class ElasticSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting Elastic sink task.")
- Helpers.checkInputTopics(ElasticConfigConstants.KCQL, props.asScala.toMap)
+ Helpers.checkInputTopics(constants.KCQL, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
override def stop(): Unit = {}
override def version(): String = manifest.version()
- override def config(): ConfigDef = configDef
+ override def config(): ConfigDef = constants.configDef
}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkTask.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkTask.scala
similarity index 50%
rename from kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkTask.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkTask.scala
index d29d6201d..2e6c194c4 100644
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkTask.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkTask.scala
@@ -13,51 +13,79 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7
+package io.lenses.streamreactor.connect.elastic.common
+import cats.effect.IO
+import cats.effect.unsafe.implicits.global
+import cats.implicits.catsSyntaxOptionId
+import cats.implicits.toBifunctorOps
+import io.lenses.streamreactor.common.errors.ErrorHandler
import io.lenses.streamreactor.common.errors.RetryErrorPolicy
import io.lenses.streamreactor.common.utils.AsciiArtPrinter.printAsciiHeader
import io.lenses.streamreactor.common.utils.JarManifest
import io.lenses.streamreactor.common.utils.ProgressCounter
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfigConstants
-import io.lenses.streamreactor.connect.elastic7.config.ElasticSettings
import com.typesafe.scalalogging.StrictLogging
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfigDef
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettings
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettingsReader
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticClientCreator
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticJsonWriter
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticWriter
import org.apache.kafka.clients.consumer.OffsetAndMetadata
import org.apache.kafka.common.TopicPartition
+import org.apache.kafka.connect.errors.ConnectException
import org.apache.kafka.connect.sink.SinkRecord
import org.apache.kafka.connect.sink.SinkTask
import java.util
+import scala.concurrent.duration.Duration
+import scala.concurrent.duration.DurationInt
import scala.jdk.CollectionConverters.IterableHasAsScala
import scala.jdk.CollectionConverters.MapHasAsScala
-class ElasticSinkTask extends SinkTask with StrictLogging {
- private var writer: Option[ElasticJsonWriter] = None
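+/**
+ * Base sink task shared by the concrete Elasticsearch and OpenSearch tasks. The concrete module
+ * supplies a settings reader, a client creator and its config definition; this class wires them
+ * into the common ElasticJsonWriter and the shared error handling.
+ */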
+abstract class ElasticSinkTask[C <: ElasticSettings, CD <: ElasticConfigDef](
+ configReader: ElasticSettingsReader[C, CD],
+ writerCreator: ElasticClientCreator[C],
+ configDef: CD,
+ asciiArt: String,
+) extends SinkTask
+ with StrictLogging
+ with ErrorHandler {
+
+ private var writer: Option[ElasticWriter] = None
private val progressCounter = new ProgressCounter
private var enableProgress: Boolean = false
private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
+ private var writeTimeout: Option[Duration] = None
/**
* Parse the configurations and setup the writer
*/
override def start(props: util.Map[String, String]): Unit = {
- printAsciiHeader(manifest, "/elastic-ascii.txt")
+ printAsciiHeader(manifest, asciiArt)
val conf = if (context.configs().isEmpty) props else context.configs()
- ElasticConfig.config.parse(conf)
- val sinkConfig = ElasticConfig(conf.asScala.toMap)
- enableProgress = sinkConfig.getBoolean(ElasticConfigConstants.PROGRESS_COUNTER_ENABLED)
+ val settings: C = configReader.read(configDef, conf.asScala.toMap).leftMap(t =>
+ throw new ConnectException("exception reading config", t),
+ ).merge
+
+ enableProgress = settings.common.progressCounter
//if error policy is retry set retry interval
- val settings = ElasticSettings(sinkConfig)
- settings.errorPolicy match {
- case RetryErrorPolicy() => context.timeout(sinkConfig.getInt(ElasticConfigConstants.ERROR_RETRY_INTERVAL).toLong)
+ settings.common.errorPolicy match {
+ case RetryErrorPolicy() => context.timeout(settings.common.errorRetryInterval)
case _ =>
}
- writer = Some(ElasticWriter(sinkConfig))
+ //initialize error tracker
+ initialize(settings.common.taskRetries, settings.common.errorPolicy)
+
+ writeTimeout = settings.common.writeTimeout.seconds.some
+ val elasticClientWrapper =
+ writerCreator.create(settings).leftMap(t => throw new ConnectException("exception creating connection", t)).merge
+ val elasticJsonWriter = new ElasticJsonWriter(elasticClientWrapper, settings.common)
+ writer = Some(elasticJsonWriter)
}
/**
@@ -66,8 +94,11 @@ class ElasticSinkTask extends SinkTask with StrictLogging {
override def put(records: util.Collection[SinkRecord]): Unit = {
require(writer.nonEmpty, "Writer is not set!")
val seq = records.asScala.toVector
- writer.foreach(_.write(seq))
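+ // Run the write as an IO, capture any failure, apply the configured write timeout, and pass the outcome to the error handler.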
+ val ioWrite = writer.map(_.write(seq).attempt).getOrElse(IO(Right(())))
+ val timeoutIo = writeTimeout.fold(ioWrite)(wT => ioWrite.timeout(wT))
+
+ handleTry(timeoutIo.map(_.toTry).unsafeRunSync())
if (enableProgress) {
progressCounter.update(seq)
}
@@ -78,7 +109,7 @@ class ElasticSinkTask extends SinkTask with StrictLogging {
*/
override def stop(): Unit = {
logger.info("Stopping Elastic sink.")
- writer.foreach(w => w.close())
+ writer.foreach(_.close())
progressCounter.empty()
}
diff --git a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/indexname/ClockFixture.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/ElasticClientWrapper.scala
similarity index 71%
rename from kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/indexname/ClockFixture.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/ElasticClientWrapper.scala
index 76e9c9bcd..cba3036bd 100644
--- a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/indexname/ClockFixture.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/ElasticClientWrapper.scala
@@ -13,12 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic6.indexname
+package io.lenses.streamreactor.connect.elastic.common.client
-import java.time.Clock
-import java.time.Instant
-import java.time.ZoneOffset
+import cats.effect.IO
-trait ClockFixture {
- val TestClock = Clock.fixed(Instant.parse("2016-10-02T14:00:00.00Z"), ZoneOffset.UTC)
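+// Thin abstraction over the underlying client so the shared writer does not depend on a specific Elasticsearch/OpenSearch client library.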
+trait ElasticClientWrapper {
+
+ def createIndex(indexName: String): IO[Unit]
+
+ def close(): IO[Unit]
+
+ def execute(reqs: Seq[Request]): IO[Unit]
}
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/Request.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/Request.scala
new file mode 100644
index 000000000..2ec701024
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/Request.scala
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.client
+
+import com.fasterxml.jackson.databind.JsonNode
+
+trait Request
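+// InsertRequest additionally carries the ingest pipeline to index through; UpsertRequest inserts or updates the document identified by id.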
+case class InsertRequest(index: String, id: String, json: JsonNode, pipeline: String) extends Request
+case class UpsertRequest(index: String, id: String, json: JsonNode) extends Request
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettings.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettings.scala
new file mode 100644
index 000000000..f1689c194
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettings.scala
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
+import io.lenses.kcql.Kcql
+import io.lenses.streamreactor.common.errors.ErrorPolicy
+
+case class ElasticCommonSettings(
+ kcqls: Seq[Kcql],
+ errorPolicy: ErrorPolicy,
+ taskRetries: Int,
+ writeTimeout: Int,
+ batchSize: Int,
+ pkJoinerSeparator: String,
+ progressCounter: Boolean,
+ errorRetryInterval: Long,
+) extends ElasticSettings {
+ override def common: ElasticCommonSettings = this
+}
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettingsReader.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettingsReader.scala
new file mode 100644
index 000000000..1cfd32024
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettingsReader.scala
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
+import io.lenses.kcql.Kcql
+
+import scala.util.Try
+
+object ElasticCommonSettingsReader extends ElasticSettingsReader[ElasticCommonSettings, ElasticConfigDef] {
+ override def read(configDef: ElasticConfigDef, props: Map[String, String]): Either[Throwable, ElasticCommonSettings] =
+ for {
+ config <- Try(ElasticConfig(configDef, configDef.connectorPrefix, props)).toEither
+
+ kcql = config.getString(configDef.KCQL).split(";").filter(_.trim.nonEmpty).map(Kcql.parse).toIndexedSeq
+ pkJoinerSeparator = config.getString(configDef.PK_JOINER_SEPARATOR)
+ writeTimeout = config.getWriteTimeout
+ errorPolicy = config.getErrorPolicy
+ retries = config.getNumberRetries
+ progressCounter = config.getBoolean(configDef.PROGRESS_COUNTER_ENABLED)
+
+ errorRetryInterval = config.getLong(configDef.ERROR_RETRY_INTERVAL).toLong
+ batchSize = config.getInt(configDef.BATCH_SIZE_CONFIG)
+ } yield {
+ ElasticCommonSettings(
+ kcql,
+ errorPolicy,
+ retries,
+ writeTimeout,
+ batchSize,
+ pkJoinerSeparator,
+ progressCounter,
+ errorRetryInterval,
+ )
+ }
+}
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfig.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfig.scala
new file mode 100644
index 000000000..0d961b957
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfig.scala
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
+import io.lenses.streamreactor.common.config.base.traits.BaseConfig
+import io.lenses.streamreactor.common.config.base.traits.ErrorPolicySettings
+import io.lenses.streamreactor.common.config.base.traits.NumberRetriesSettings
+import io.lenses.streamreactor.common.config.base.traits.WriteTimeoutSettings
+
+/**
+ *
+ * ElasticConfig
+ *
+ * Holds config, extends AbstractConfig.
+ */
+case class ElasticConfig(configDef: ElasticConfigDef, prefix: String, props: Map[String, String])
+ extends BaseConfig(prefix, configDef.configDef, props)
+ with WriteTimeoutSettings
+ with ErrorPolicySettings
+ with NumberRetriesSettings
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfigDef.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfigDef.scala
new file mode 100644
index 000000000..03b58c583
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfigDef.scala
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
+import io.lenses.streamreactor.common.config.base.const.TraitConfigConst._
+import org.apache.kafka.common.config.ConfigDef
+import org.apache.kafka.common.config.ConfigDef.Importance
+import org.apache.kafka.common.config.ConfigDef.Type
+
+abstract class ElasticConfigDef(val connectorPrefix: String) {
+
+ def configDef: ConfigDef =
+ new ConfigDef()
+ .define(
+ WRITE_TIMEOUT_CONFIG,
+ Type.INT,
+ WRITE_TIMEOUT_DEFAULT,
+ Importance.MEDIUM,
+ WRITE_TIMEOUT_DOC,
+ "Connection",
+ 6,
+ ConfigDef.Width.MEDIUM,
+ WRITE_TIMEOUT_DISPLAY,
+ )
+ .define(
+ BATCH_SIZE_CONFIG,
+ Type.INT,
+ BATCH_SIZE_DEFAULT,
+ Importance.MEDIUM,
+ BATCH_SIZE_DOC,
+ "Connection",
+ 7,
+ ConfigDef.Width.MEDIUM,
+ BATCH_SIZE_DISPLAY,
+ )
+ .define(
+ ERROR_POLICY_CONFIG,
+ Type.STRING,
+ ERROR_POLICY_DEFAULT,
+ Importance.HIGH,
+ ERROR_POLICY_DOC,
+ "Error",
+ 1,
+ ConfigDef.Width.MEDIUM,
+ ERROR_POLICY_CONFIG,
+ )
+ .define(
+ NBR_OF_RETRIES_CONFIG,
+ Type.INT,
+ NBR_OF_RETIRES_DEFAULT,
+ Importance.MEDIUM,
+ NBR_OF_RETRIES_DOC,
+ "Error",
+ 2,
+ ConfigDef.Width.SHORT,
+ NBR_OF_RETRIES_CONFIG,
+ )
+ .define(
+ ERROR_RETRY_INTERVAL,
+ Type.LONG,
+ ERROR_RETRY_INTERVAL_DEFAULT,
+ Importance.MEDIUM,
+ ERROR_RETRY_INTERVAL_DOC,
+ "Error",
+ 3,
+ ConfigDef.Width.LONG,
+ ERROR_RETRY_INTERVAL,
+ )
+ .define(
+ KCQL,
+ Type.STRING,
+ Importance.HIGH,
+ KCQL_DOC,
+ "KCQL",
+ 1,
+ ConfigDef.Width.LONG,
+ KCQL,
+ )
+ .define(
+ PK_JOINER_SEPARATOR,
+ Type.STRING,
+ PK_JOINER_SEPARATOR_DEFAULT,
+ Importance.LOW,
+ PK_JOINER_SEPARATOR_DOC,
+ "KCQL",
+ 2,
+ ConfigDef.Width.SHORT,
+ PK_JOINER_SEPARATOR,
+ )
+ .define(
+ PROGRESS_COUNTER_ENABLED,
+ Type.BOOLEAN,
+ PROGRESS_COUNTER_ENABLED_DEFAULT,
+ Importance.MEDIUM,
+ PROGRESS_COUNTER_ENABLED_DOC,
+ "Metrics",
+ 1,
+ ConfigDef.Width.MEDIUM,
+ PROGRESS_COUNTER_ENABLED_DISPLAY,
+ )
+ .withClientSslSupport()
+
+ val KCQL: String = s"$connectorPrefix.$KCQL_PROP_SUFFIX"
+ val KCQL_DOC = "KCQL expression describing field selection and routes."
+
+ val ERROR_POLICY_CONFIG = s"$connectorPrefix.$ERROR_POLICY_PROP_SUFFIX"
+ val ERROR_POLICY_DOC: String =
+ """Specifies the action to be taken if an error occurs while inserting the data
+ |There are three available options:
+ |NOOP - the error is swallowed
+ |THROW - the error is allowed to propagate.
+ |RETRY - The exception causes the Connect framework to retry the message. The number of retries is based on the maximum retries setting.
+ |The error will be logged automatically""".stripMargin
+ val ERROR_POLICY_DEFAULT = "THROW"
+
+ val WRITE_TIMEOUT_CONFIG = s"$connectorPrefix.$WRITE_TIMEOUT_SUFFIX"
+ val WRITE_TIMEOUT_DOC = "The time to wait, in milliseconds, for a write to complete. Default is 5 minutes."
+ val WRITE_TIMEOUT_DISPLAY = "Write timeout"
+ val WRITE_TIMEOUT_DEFAULT = 300000
+
+ val ERROR_RETRY_INTERVAL = s"$connectorPrefix.$RETRY_INTERVAL_PROP_SUFFIX"
+ val ERROR_RETRY_INTERVAL_DOC = "The time in milliseconds between retries."
+ val ERROR_RETRY_INTERVAL_DEFAULT = 60000L
+
+ val NBR_OF_RETRIES_CONFIG = s"$connectorPrefix.$MAX_RETRIES_PROP_SUFFIX"
+ val NBR_OF_RETRIES_DOC = "The maximum number of times to try the write again."
+ val NBR_OF_RETIRES_DEFAULT = 20
+
+ val BATCH_SIZE_CONFIG = s"$connectorPrefix.$BATCH_SIZE_PROP_SUFFIX"
+ val BATCH_SIZE_DOC =
+ "How many records to process at one time. As records are pulled from Kafka it can be 100k+ which will not be feasible to throw at Elastic search at once"
+ val BATCH_SIZE_DISPLAY = "Batch size"
+ val BATCH_SIZE_DEFAULT = 4000
+
+ val PROGRESS_COUNTER_ENABLED: String = PROGRESS_ENABLED_CONST
+ val PROGRESS_COUNTER_ENABLED_DOC = "Enables logging of the number of records that have been processed"
+ val PROGRESS_COUNTER_ENABLED_DEFAULT = false
+ val PROGRESS_COUNTER_ENABLED_DISPLAY = "Enable progress counter"
+
+ val PK_JOINER_SEPARATOR = s"$connectorPrefix.pk.separator"
+ val PK_JOINER_SEPARATOR_DOC = "Separator used when there is more than one field in the primary key"
+ val PK_JOINER_SEPARATOR_DEFAULT = "-"
+
+}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/package.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettings.scala
similarity index 74%
rename from kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/package.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettings.scala
index 7d848dcc1..8852e458b 100644
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/package.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettings.scala
@@ -13,11 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic6
+package io.lenses.streamreactor.connect.elastic.common.config
-package object indexname {
+trait ElasticSettings {
+
+ def common: ElasticCommonSettings
- implicit class StringToOption(text: String) {
- def toOption: Option[String] = if (text.nonEmpty) Some(text) else None
- }
}
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettingsReader.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettingsReader.scala
new file mode 100644
index 000000000..f08c9e441
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettingsReader.scala
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
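+// Parses the connector properties into the module-specific settings type, reporting any failure as a Left.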
+trait ElasticSettingsReader[C <: ElasticSettings, CD <: ElasticConfigDef] {
+
+ def read(configDef: CD, props: Map[String, String]): Either[Throwable, C]
+}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/CreateIndex.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CreateIndex.scala
similarity index 69%
rename from kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/CreateIndex.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CreateIndex.scala
index 3b1f3822b..117f479f2 100644
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/CreateIndex.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CreateIndex.scala
@@ -13,14 +13,23 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7.indexname
+package io.lenses.streamreactor.connect.elastic.common.indexname
+import cats.effect.IO
import io.lenses.kcql.Kcql
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
/**
* Creates the index for the given KCQL configuration.
*/
object CreateIndex {
+
+ def createIndex(kcql: Kcql, client: ElasticClientWrapper): IO[Unit] = {
+ require(kcql.isAutoCreate, s"Auto-creating indexes hasn't been enabled for target:${kcql.getTarget}")
+
+ client.createIndex(getIndexName(kcql)) *> IO.unit
+ }
+
def getIndexName(kcql: Kcql): String =
Option(kcql.getIndexSuffix).fold(kcql.getTarget) { indexNameSuffix =>
s"${kcql.getTarget}${CustomIndexName.parseIndexName(indexNameSuffix)}"
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/CustomIndexName.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CustomIndexName.scala
similarity index 97%
rename from kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/CustomIndexName.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CustomIndexName.scala
index 5ea265c02..b8d8c0a8f 100644
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/CustomIndexName.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CustomIndexName.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7.indexname
+package io.lenses.streamreactor.connect.elastic.common.indexname
import scala.annotation.tailrec
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/IndexNameFragment.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/IndexNameFragment.scala
similarity index 95%
rename from kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/IndexNameFragment.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/IndexNameFragment.scala
index 1114c88e6..7086e8cd1 100644
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/IndexNameFragment.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/IndexNameFragment.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic6.indexname
+package io.lenses.streamreactor.connect.elastic.common.indexname
import java.time.Clock
import java.time.LocalDateTime._
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/package.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/package.scala
similarity index 93%
rename from kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/package.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/package.scala
index 215bd6074..2f2617cfb 100644
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/package.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/package.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7
+package io.lenses.streamreactor.connect.elastic.common
package object indexname {
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/PrimaryKeyExtractor.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/PrimaryKeyExtractor.scala
similarity index 99%
rename from kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/PrimaryKeyExtractor.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/PrimaryKeyExtractor.scala
index 34e45a4ff..65a883661 100644
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/PrimaryKeyExtractor.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/PrimaryKeyExtractor.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic6
+package io.lenses.streamreactor.connect.elastic.common.transform
import com.fasterxml.jackson.databind.JsonNode
import com.fasterxml.jackson.databind.node._
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/Transform.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/Transform.scala
similarity index 97%
rename from kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/Transform.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/Transform.scala
index acf31c206..ee954973c 100644
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/Transform.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/Transform.scala
@@ -13,9 +13,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic6
+package io.lenses.streamreactor.connect.elastic.common.transform
-import java.nio.ByteBuffer
import io.lenses.streamreactor.connect.json.SimpleJsonConverter
import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.databind.JsonNode
@@ -27,11 +26,12 @@ import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.data.Struct
+import java.nio.ByteBuffer
import scala.util.Failure
import scala.util.Success
import scala.util.Try
-private object Transform extends StrictLogging {
+object Transform extends StrictLogging {
lazy val simpleJsonConverter = new SimpleJsonConverter()
def apply(
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/TransformAndExtractPK.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/TransformAndExtractPK.scala
similarity index 97%
rename from kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/TransformAndExtractPK.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/TransformAndExtractPK.scala
index ea3062e97..2d87d5005 100644
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/TransformAndExtractPK.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/TransformAndExtractPK.scala
@@ -13,9 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7
-
-import java.nio.ByteBuffer
+package io.lenses.streamreactor.connect.elastic.common.transform
import io.lenses.streamreactor.connect.json.SimpleJsonConverter
import com.fasterxml.jackson.databind.JsonNode
@@ -27,11 +25,12 @@ import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.data.Struct
+import java.nio.ByteBuffer
import scala.util.Failure
import scala.util.Success
import scala.util.Try
-private object TransformAndExtractPK extends StrictLogging {
+object TransformAndExtractPK extends StrictLogging {
lazy val simpleJsonConverter = new SimpleJsonConverter()
def apply(
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticClientCreator.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticClientCreator.scala
new file mode 100644
index 000000000..ba3a785d4
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticClientCreator.scala
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.writers
+
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.config
+
+trait ElasticClientCreator[C <: config.ElasticSettings] {
+ def create(config: C): Either[Throwable, ElasticClientWrapper]
+}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticJsonWriter.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticJsonWriter.scala
similarity index 66%
rename from kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticJsonWriter.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticJsonWriter.scala
index c2fdb3208..f8ba8b53f 100644
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticJsonWriter.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticJsonWriter.scala
@@ -13,45 +13,44 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7
+package io.lenses.streamreactor.connect.elastic.common.writers
-import java.util
+import cats.effect.IO
+import cats.effect.unsafe.implicits.global
+import cats.implicits._
import io.lenses.kcql.Kcql
import io.lenses.kcql.WriteModeEnum
import io.lenses.streamreactor.common.converters.FieldConverter
-import io.lenses.streamreactor.common.errors.ErrorHandler
import io.lenses.streamreactor.common.schemas.ConverterUtil
-import io.lenses.streamreactor.connect.elastic7.config.ElasticSettings
-import io.lenses.streamreactor.connect.elastic7.indexname.CreateIndex
-import com.fasterxml.jackson.databind.JsonNode
import io.lenses.sql.Field
-import com.sksamuel.elastic4s.Index
-import com.sksamuel.elastic4s.Indexable
-import com.sksamuel.elastic4s.ElasticDsl._
import com.typesafe.scalalogging.StrictLogging
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.client.InsertRequest
+import io.lenses.streamreactor.connect.elastic.common.client.Request
+import io.lenses.streamreactor.connect.elastic.common.client.UpsertRequest
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettings
+import io.lenses.streamreactor.connect.elastic.common.indexname.CreateIndex
+import io.lenses.streamreactor.connect.elastic.common.transform.Transform
+import io.lenses.streamreactor.connect.elastic.common.transform.TransformAndExtractPK
import org.apache.kafka.connect.sink.SinkRecord
+import java.util
import scala.annotation.nowarn
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.duration._
-import scala.concurrent.Await
-import scala.concurrent.Future
import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.util.Try
@nowarn
-class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
- extends ErrorHandler
- with StrictLogging
- with ConverterUtil {
+class ElasticJsonWriter(client: ElasticClientWrapper, settings: ElasticCommonSettings)
+ extends ElasticWriter
+ with ConverterUtil
+ with AutoCloseable
+ with StrictLogging {
logger.info("Initialising Elastic Json writer")
- //initialize error tracker
- initialize(settings.taskRetries, settings.errorPolicy)
-
//create the index automatically if it was set to do so
- settings.kcqls.filter(_.isAutoCreate).foreach(client.index)
+ settings.kcqls.filter(_.isAutoCreate).toList.map(CreateIndex.createIndex(_, client)).traverse(_.attempt).onError(t =>
+ throw t,
+ ).unsafeRunSync()
private val topicKcqlMap = settings.kcqls.groupBy(_.getSource)
@@ -71,23 +70,20 @@ class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
}
- implicit object SinkRecordIndexable extends Indexable[SinkRecord] {
- override def json(t: SinkRecord): String = convertValueToJson(t).toString
- }
-
/**
* Close elastic4s client
*/
- def close(): Unit = client.close()
+ override def close(): Unit = client.close()
/**
* Write SinkRecords to Elastic Search if list is not empty
*
* @param records A list of SinkRecords
*/
- def write(records: Vector[SinkRecord]): Unit =
+ override def write(records: Vector[SinkRecord]): IO[Unit] =
if (records.isEmpty) {
logger.debug("No records received.")
+ IO.unit
} else {
logger.debug(s"Received ${records.size} records.")
val grouped = records.groupBy(_.topic())
@@ -99,8 +95,8 @@ class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
*
* @param records A list of SinkRecords
*/
- def insert(records: Map[String, Vector[SinkRecord]]): Unit = {
- val fut = records.flatMap {
+ private def insert(records: Map[String, Vector[SinkRecord]]): IO[Unit] =
+ records.flatMap {
case (topic, sinkRecords) =>
val kcqls = topicKcqlMap.getOrElse(
topic,
@@ -115,7 +111,7 @@ class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
val kcqlValue = kcqlMap.get(kcql)
sinkRecords.grouped(settings.batchSize)
.map { batch =>
- val indexes = batch.map { r =>
+ val indexes: Seq[Request] = batch.map { r =>
val (json, pks) = if (kcqlValue.primaryKeysPath.isEmpty) {
(Transform(
kcqlValue.fields,
@@ -138,37 +134,25 @@ class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
kcql.getWriteMode match {
case WriteModeEnum.INSERT =>
- indexInto(new Index(i))
- .id(if (idFromPk.isEmpty) autoGenId(r) else idFromPk)
- .pipeline(kcql.getPipeline)
- .source(json.toString)
+ val id = if (idFromPk.isEmpty) autoGenId(r) else idFromPk
+ InsertRequest(i, id, json, kcql.getPipeline)
case WriteModeEnum.UPSERT =>
- require(pks.nonEmpty, "Error extracting primary keys")
- updateById(new Index(i), idFromPk)
- .docAsUpsert(json)(IndexableJsonNode)
+ UpsertRequest(i, idFromPk, json)
}
}
- client.execute(bulk(indexes).refreshImmediately)
+ client.execute(indexes)
}
}
- }
-
- handleTry(
- Try(
- Await.result(Future.sequence(fut), settings.writeTimeout.seconds),
- ),
- )
- ()
- }
+ }.toList.traverse(identity).void
/**
* Create id from record infos
*
* @param record One SinkRecord
*/
- def autoGenId(record: SinkRecord): String = {
+ private def autoGenId(record: SinkRecord): String = {
val pks: Seq[Any] = Seq(record.topic(), record.kafkaPartition(), record.kafkaOffset())
pks.mkString(settings.pkJoinerSeparator)
}
@@ -176,7 +160,3 @@ class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
private case class KcqlValues(fields: Seq[Field], ignoredFields: Seq[Field], primaryKeysPath: Seq[Vector[String]])
}
-
-case object IndexableJsonNode extends Indexable[JsonNode] {
- override def json(t: JsonNode): String = t.toString
-}
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticWriter.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticWriter.scala
new file mode 100644
index 000000000..21a27b30e
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticWriter.scala
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.writers
+
+import cats.effect.IO
+import org.apache.kafka.connect.sink.SinkRecord
+
+trait ElasticWriter {
+
+ /**
+ * Close elastic4s client
+ */
+ def close(): Unit
+
+ /**
+ * Write SinkRecords to Elastic Search if list is not empty
+ *
+ * @param records A list of SinkRecords
+ */
+ def write(records: Vector[SinkRecord]): IO[Unit]
+
+}
diff --git a/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/CreateLocalNodeClientUtil.scala b/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/CreateLocalNodeClientUtil.scala
deleted file mode 100644
index 1694d5a43..000000000
--- a/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/CreateLocalNodeClientUtil.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package io.lenses.streamreactor.connect.elastic6
-
-import com.sksamuel.elastic4s.http.ElasticClient
-import com.sksamuel.elastic4s.http.ElasticProperties
-import org.testcontainers.elasticsearch.ElasticsearchContainer
-
-object CreateLocalNodeClientUtil {
-
- private val url = "docker.elastic.co/elasticsearch/elasticsearch:6.8.21"
-
- def createLocalNode() = {
- val container = new ElasticsearchContainer(url)
- //container.withReuse(true)
- container.start()
- container
- }
-
- def createLocalNodeClient(localNode: ElasticsearchContainer) = {
- val esProps = ElasticProperties(s"http://${localNode.getHttpHostAddress}")
- ElasticClient(esProps)
- }
-}
diff --git a/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriterSelectionTest.scala b/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriterSelectionTest.scala
deleted file mode 100644
index 28f0363a3..000000000
--- a/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriterSelectionTest.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-/*
- * Copyright 2017 Datamountaineer.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.lenses.streamreactor.connect.elastic6
-
-import io.lenses.streamreactor.connect.elastic6.CreateLocalNodeClientUtil.createLocalNode
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic6.config.ElasticSettings
-import com.sksamuel.elastic4s.http.ElasticClient
-import com.sksamuel.elastic4s.http.ElasticDsl._
-import org.apache.kafka.connect.sink.SinkTaskContext
-import org.mockito.MockitoSugar
-
-import java.util.UUID
-import scala.reflect.io.File
-
-class ElasticWriterSelectionTest extends ITBase with MockitoSugar {
- "A ElasticWriter should insert into Elastic Search a number of records" in {
-
- val TMP = File(System.getProperty("java.io.tmpdir") + "/elastic-" + UUID.randomUUID())
- TMP.createDirectory()
- //mock the context to return our assignment when called
- val context = mock[SinkTaskContext]
- when(context.assignment()).thenReturn(getAssignment)
- //get test records
- val testRecords = getTestRecords()
- //get config
- val config = new ElasticConfig(getElasticSinkConfigPropsSelection())
-
- val localNode = createLocalNode()
- val client: ElasticClient = CreateLocalNodeClientUtil.createLocalNodeClient(localNode)
- //get writer
-
- val settings = ElasticSettings(config)
- val writer = new ElasticJsonWriter(new HttpKElasticClient(client), settings)
- //write records to elastic
- writer.write(testRecords)
-
- Thread.sleep(2000)
- //check counts
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe testRecords.size
- //close writer
- writer.close()
- client.close()
- TMP.deleteRecursively()
- }
-
- "A ElasticWriter should insert into Elastic Search a number of records when nested fields are selected" in {
- val TMP = File(System.getProperty("java.io.tmpdir") + "/elastic-" + UUID.randomUUID())
- TMP.createDirectory()
- //mock the context to return our assignment when called
- val context = mock[SinkTaskContext]
- when(context.assignment()).thenReturn(getAssignment)
- //get test records
- val testRecords = getTestRecordsNested
- //get config
- val config =
- new ElasticConfig(getBaseElasticSinkConfigProps(s"INSERT INTO $INDEX SELECT id, nested.string_field FROM $TOPIC"))
-
- val localNode = createLocalNode()
- val client: ElasticClient = CreateLocalNodeClientUtil.createLocalNodeClient(localNode)
- //get writer
-
- val settings = ElasticSettings(config)
- val writer = new ElasticJsonWriter(new HttpKElasticClient(client), settings)
- //write records to elastic
- writer.write(testRecords)
-
- Thread.sleep(2000)
- //check counts
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe testRecords.size
- //close writer
- writer.close()
- client.close()
- TMP.deleteRecursively()
- }
-
- "A ElasticWriter should update records in Elastic Search" in {
- val TMP = File(System.getProperty("java.io.tmpdir") + "/elastic-" + UUID.randomUUID())
- TMP.createDirectory()
- //mock the context to return our assignment when called
- val context = mock[SinkTaskContext]
- when(context.assignment()).thenReturn(getAssignment)
- //get test records
- val testRecords = getTestRecords()
- //get config
- val config = new ElasticConfig(getElasticSinkUpdateConfigPropsSelection())
-
- val localNode = createLocalNode()
- val client: ElasticClient = CreateLocalNodeClientUtil.createLocalNodeClient(localNode)
- val settings = ElasticSettings(config)
- val writer = new ElasticJsonWriter(new HttpKElasticClient(client), settings)
- //First run writes records to elastic
- writer.write(testRecords)
-
- Thread.sleep(2000)
- //check counts
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe testRecords.size
-
- val testUpdateRecords = getUpdateTestRecord
-
- //Second run just updates
- writer.write(testUpdateRecords)
-
- Thread.sleep(2000)
- //check counts
- val updateRes = client.execute {
- search(INDEX)
- }.await
- updateRes.result.totalHits shouldBe testRecords.size
-
- //close writer
- writer.close()
- client.close()
- localNode.close()
- TMP.deleteRecursively()
- }
-
- "A ElasticWriter should update records in Elastic Search with PK nested field" in {
- val TMP = File(System.getProperty("java.io.tmpdir") + "/elastic-" + UUID.randomUUID())
- TMP.createDirectory()
- //mock the context to return our assignment when called
- val context = mock[SinkTaskContext]
- when(context.assignment()).thenReturn(getAssignment)
- //get test records
- val testRecords = getTestRecordsNested
- //get config
- val config = new ElasticConfig(
- getBaseElasticSinkConfigProps(s"UPSERT INTO $INDEX SELECT nested.id, string_field FROM $TOPIC PK nested.id"),
- )
-
- val localNode = createLocalNode()
- val client: ElasticClient = CreateLocalNodeClientUtil.createLocalNodeClient(localNode)
- val settings = ElasticSettings(config)
- val writer = new ElasticJsonWriter(new HttpKElasticClient(client), settings)
- //First run writes records to elastic
- writer.write(testRecords)
-
- Thread.sleep(2000)
- //check counts
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe testRecords.size
-
- val testUpdateRecords = getUpdateTestRecordNested
-
- //Second run just updates
- writer.write(testUpdateRecords)
-
- Thread.sleep(2000)
- //check counts
- val updateRes = client.execute {
- search(INDEX)
- }.await
- updateRes.result.totalHits shouldBe testRecords.size
-
- //close writer
- writer.close()
- client.close()
- localNode.close()
-
- TMP.deleteRecursively()
- }
-}
diff --git a/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriterTest.scala b/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriterTest.scala
deleted file mode 100644
index 98cdc9864..000000000
--- a/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriterTest.scala
+++ /dev/null
@@ -1,234 +0,0 @@
-/*
- * Copyright 2017 Datamountaineer.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.lenses.streamreactor.connect.elastic6
-
-import io.lenses.streamreactor.connect.elastic6.CreateLocalNodeClientUtil.createLocalNode
-import io.lenses.streamreactor.connect.elastic6.CreateLocalNodeClientUtil.createLocalNodeClient
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic6.config.ElasticSettings
-import com.sksamuel.elastic4s.http.ElasticClient
-import com.sksamuel.elastic4s.http.ElasticDsl._
-import org.elasticsearch.common.settings.Settings
-import org.mockito.MockitoSugar
-import org.scalatest.BeforeAndAfterEach
-import org.testcontainers.elasticsearch.ElasticsearchContainer
-
-import java.nio.file.Paths
-import java.util.UUID
-import scala.reflect.io.File
-
-class ElasticWriterTest extends ITBase with MockitoSugar with BeforeAndAfterEach {
-
- class TestContext {
-
- val TemporaryLocalNodeDir = createTmpDir()
- val RandomClusterName = UUID.randomUUID().toString()
- val TestRecords = getTestRecords()
-
- val DefaultSettings = Settings
- .builder()
- .put("cluster.name", RandomClusterName)
- .put("path.home", TemporaryLocalNodeDir.toString)
- .put("path.data", Paths.get(TemporaryLocalNodeDir.toString()).resolve("data").toString)
- .put("path.repo", Paths.get(TemporaryLocalNodeDir.toString()).resolve("repo").toString)
- .build()
-
- private def createTmpDir(): File = {
- val dirFile = File(System.getProperty("java.io.tmpdir") + "/elastic-" + UUID.randomUUID())
- dirFile.createDirectory()
- dirFile
- }
-
- def writeTestRecords(props: Map[String, String]) = {
-
- val localNode = createLocalNode()
-
- val client: ElasticClient = createLocalNodeClient(localNode)
-
- val writer = new ElasticJsonWriter(new HttpKElasticClient(client), ElasticSettings(ElasticConfig(props)))
-
- writer.write(TestRecords)
- (localNode, client, writer)
- }
- }
-
- "A ElasticWriter should insert into Elastic Search a number of records" in new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigProps(RandomClusterName),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.stop()
- TemporaryLocalNodeDir.deleteRecursively()
-
- }
-
- "A ElasticWriter should update a number of records in Elastic Search" in new TestContext {
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkUpdateConfigProps(RandomClusterName),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- val testUpdateRecords = getUpdateTestRecord
-
- //Second run just updates
- writer.write(testUpdateRecords)
-
- Thread.sleep(2000)
-
- val updateRes = client.execute {
- search(INDEX)
- }.await
- updateRes.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.stop()
- TemporaryLocalNodeDir.deleteRecursively()
- }
-
- "A ElasticWriter should update a number of records in Elastic Search with index suffix defined" in new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigPropsWithDateSuffixAndIndexAutoCreation(autoCreate = true),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX_WITH_DATE)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.stop()
- TemporaryLocalNodeDir.deleteRecursively()
-
- }
-
- "It should fail writing to a non-existent index when auto creation is disabled" ignore new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigPropsWithDateSuffixAndIndexAutoCreation(autoCreate = false, RandomClusterName),
- )
-
- Thread.sleep(2000)
-
- val searchResponse = client.execute {
- search(INDEX_WITH_DATE)
- }.await
- searchResponse.isError should be(true)
- searchResponse.error.`type` should be("index_not_found_exception")
-
- writer.close()
- client.close()
- node.close()
- TemporaryLocalNodeDir.deleteRecursively()
-
- }
-
- "A ElasticWriter should insert into Elastic Search a number of records with the HTTP Client" in new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigPropsHTTPClient(),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.close()
- TemporaryLocalNodeDir.deleteRecursively()
- }
-
- "A ElasticWriter should insert into with PK Elastic Search a number of records" in new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigPropsPk(RandomClusterName),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- writer.write(TestRecords)
-
- Thread.sleep(2000)
-
- val resUpdate = client.execute {
- search(INDEX)
- }.await
- resUpdate.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.close()
- TemporaryLocalNodeDir.deleteRecursively()
- }
-
- "A ElasticWriter should insert into without PK Elastic Search a number of records" in new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigProps(RandomClusterName),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- writer.write(TestRecords)
-
- Thread.sleep(2000)
-
- val resUpdate = client.execute {
- search(INDEX)
- }.await
- resUpdate.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.close()
- TemporaryLocalNodeDir.deleteRecursively()
- }
-}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticJsonWriter.scala b/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticJsonWriter.scala
deleted file mode 100644
index 8f7f390a8..000000000
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticJsonWriter.scala
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6
-
-import java.util
-import io.lenses.kcql.Kcql
-import io.lenses.kcql.WriteModeEnum
-import io.lenses.streamreactor.common.converters.FieldConverter
-import io.lenses.streamreactor.common.errors.ErrorHandler
-import io.lenses.streamreactor.common.schemas.ConverterUtil
-import io.lenses.streamreactor.connect.elastic6.config.ElasticSettings
-import io.lenses.streamreactor.connect.elastic6.indexname.CreateIndex
-import com.fasterxml.jackson.databind.JsonNode
-import io.lenses.sql.Field
-import com.sksamuel.elastic4s.Indexable
-import com.sksamuel.elastic4s.http.ElasticDsl._
-import com.typesafe.scalalogging.StrictLogging
-import org.apache.kafka.connect.sink.SinkRecord
-
-import scala.annotation.nowarn
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.duration._
-import scala.concurrent.Await
-import scala.concurrent.Future
-import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.util.Try
-
-@nowarn
-class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
- extends ErrorHandler
- with StrictLogging
- with ConverterUtil {
-
- logger.info("Initialising Elastic Json writer")
-
- //initialize error tracker
- initialize(settings.taskRetries, settings.errorPolicy)
-
- //create the index automatically if it was set to do so
- settings.kcqls.filter(_.isAutoCreate).foreach(client.index)
-
- private val topicKcqlMap = settings.kcqls.groupBy(_.getSource)
-
- private val kcqlMap = new util.IdentityHashMap[Kcql, KcqlValues]()
- settings.kcqls.foreach { kcql =>
- kcqlMap.put(
- kcql,
- KcqlValues(
- kcql.getFields.asScala.map(FieldConverter.apply).toSeq,
- kcql.getIgnoredFields.asScala.map(FieldConverter.apply).toSeq,
- kcql.getPrimaryKeys.asScala.map { pk =>
- val path = Option(pk.getParentFields).map(_.asScala.toVector).getOrElse(Vector.empty)
- path :+ pk.getName
- }.toSeq,
- ),
- )
-
- }
-
- /**
- * Close elastic4s client
- */
- def close(): Unit = client.close()
-
- /**
- * Write SinkRecords to Elastic Search if list is not empty
- *
- * @param records A list of SinkRecords
- */
- def write(records: Vector[SinkRecord]): Unit =
- if (records.isEmpty) {
- logger.debug("No records received.")
- } else {
- logger.debug(s"Received ${records.size} records.")
- val grouped = records.groupBy(_.topic())
- insert(grouped)
- }
-
- /**
- * Create a bulk index statement and execute against elastic4s client
- *
- * @param records A list of SinkRecords
- */
- def insert(records: Map[String, Vector[SinkRecord]]): Unit = {
- val fut = records.flatMap {
- case (topic, sinkRecords) =>
- val kcqls = topicKcqlMap.getOrElse(
- topic,
- throw new IllegalArgumentException(
- s"$topic hasn't been configured in KCQL. Configured topics is ${topicKcqlMap.keys.mkString(",")}",
- ),
- )
-
- //we might have multiple inserts from the same Kafka Message
- kcqls.flatMap { kcql =>
- val i = CreateIndex.getIndexName(kcql)
- val documentType = Option(kcql.getDocType).getOrElse(i)
- val kcqlValue = kcqlMap.get(kcql)
- sinkRecords.grouped(settings.batchSize)
- .map { batch =>
- val indexes = batch.map { r =>
- val (json, pks) = if (kcqlValue.primaryKeysPath.isEmpty) {
- (Transform(
- kcqlValue.fields,
- r.valueSchema(),
- r.value(),
- kcql.hasRetainStructure,
- ),
- Seq.empty,
- )
- } else {
- TransformAndExtractPK(
- kcqlValue.fields,
- kcqlValue.primaryKeysPath,
- r.valueSchema(),
- r.value(),
- kcql.hasRetainStructure,
- )
- }
- val idFromPk = pks.mkString(settings.pkJoinerSeparator)
-
- kcql.getWriteMode match {
- case WriteModeEnum.INSERT =>
- indexInto(i / documentType)
- .id(if (idFromPk.isEmpty) autoGenId(r) else idFromPk)
- .pipeline(kcql.getPipeline)
- .source(json.toString)
-
- case WriteModeEnum.UPSERT =>
- require(pks.nonEmpty, "Error extracting primary keys")
- update(idFromPk)
- .in(i / documentType)
- .docAsUpsert(json)(IndexableJsonNode)
- }
- }
-
- client.execute(bulk(indexes).refreshImmediately)
- }
- }
- }
-
- handleTry(
- Try(
- Await.result(Future.sequence(fut), settings.writeTimeout.seconds),
- ),
- )
- ()
- }
-
- /**
- * Create id from record infos
- *
- * @param record One SinkRecord
- */
- def autoGenId(record: SinkRecord): String = {
- val pks: Seq[Any] = Seq(record.topic(), record.kafkaPartition(), record.kafkaOffset())
- pks.mkString(settings.pkJoinerSeparator)
- }
-
- private case class KcqlValues(fields: Seq[Field], ignoredFields: Seq[Field], primaryKeysPath: Seq[Vector[String]])
-
-}
-
-case object IndexableJsonNode extends Indexable[JsonNode] {
- override def json(t: JsonNode): String = t.toString
-}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkConnector.scala b/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkConnector.scala
deleted file mode 100644
index 71d3a8b08..000000000
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkConnector.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6
-
-import io.lenses.streamreactor.common.config.Helpers
-import io.lenses.streamreactor.common.utils.JarManifest
-
-import java.util
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfigConstants
-import com.typesafe.scalalogging.StrictLogging
-import org.apache.kafka.common.config.ConfigDef
-import org.apache.kafka.connect.connector.Task
-import org.apache.kafka.connect.sink.SinkConnector
-
-import scala.jdk.CollectionConverters.MapHasAsScala
-import scala.jdk.CollectionConverters.SeqHasAsJava
-
-class ElasticSinkConnector extends SinkConnector with StrictLogging {
- private var configProps: Option[util.Map[String, String]] = None
- private val configDef = ElasticConfig.config
- private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
-
- /**
- * States which SinkTask class to use
- */
- override def taskClass(): Class[_ <: Task] = classOf[ElasticSinkTask]
-
- /**
- * Set the configuration for each work and determine the split
- *
- * @param maxTasks The max number of task workers be can spawn
- * @return a List of configuration properties per worker
- */
- override def taskConfigs(maxTasks: Int): util.List[util.Map[String, String]] = {
- logger.info(s"Setting task configurations for $maxTasks workers.")
- (1 to maxTasks).map(_ => configProps.get).toList.asJava
- }
-
- /**
- * Start the sink and set to configuration
- *
- * @param props A map of properties for the connector and worker
- */
- override def start(props: util.Map[String, String]): Unit = {
- logger.info(s"Starting Elastic sink task.")
- Helpers.checkInputTopics(ElasticConfigConstants.KCQL, props.asScala.toMap)
- configProps = Some(props)
- }
-
- override def stop(): Unit = {}
- override def version(): String = manifest.version()
- override def config(): ConfigDef = configDef
-}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkTask.scala b/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkTask.scala
deleted file mode 100644
index c8609df0a..000000000
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkTask.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6
-
-import io.lenses.streamreactor.common.errors.RetryErrorPolicy
-import io.lenses.streamreactor.common.utils.AsciiArtPrinter.printAsciiHeader
-import io.lenses.streamreactor.common.utils.JarManifest
-import io.lenses.streamreactor.common.utils.ProgressCounter
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfigConstants
-import io.lenses.streamreactor.connect.elastic6.config.ElasticSettings
-import com.typesafe.scalalogging.StrictLogging
-import org.apache.kafka.clients.consumer.OffsetAndMetadata
-import org.apache.kafka.common.TopicPartition
-import org.apache.kafka.connect.sink.SinkRecord
-import org.apache.kafka.connect.sink.SinkTask
-
-import java.util
-import scala.jdk.CollectionConverters.IterableHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsScala
-
-class ElasticSinkTask extends SinkTask with StrictLogging {
- private var writer: Option[ElasticJsonWriter] = None
- private val progressCounter = new ProgressCounter
- private var enableProgress: Boolean = false
- private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
-
- /**
- * Parse the configurations and setup the writer
- */
- override def start(props: util.Map[String, String]): Unit = {
- printAsciiHeader(manifest, "/elastic-ascii.txt")
-
- val conf = if (context.configs().isEmpty) props else context.configs()
-
- ElasticConfig.config.parse(conf)
- val sinkConfig = ElasticConfig(conf.asScala.toMap)
- enableProgress = sinkConfig.getBoolean(ElasticConfigConstants.PROGRESS_COUNTER_ENABLED)
-
- //if error policy is retry set retry interval
- val settings = ElasticSettings(sinkConfig)
- settings.errorPolicy match {
- case RetryErrorPolicy() => context.timeout(sinkConfig.getInt(ElasticConfigConstants.ERROR_RETRY_INTERVAL).toLong)
- case _ =>
- }
-
- writer = Some(ElasticWriter(sinkConfig))
- }
-
- /**
- * Pass the SinkRecords to the writer for Writing
- */
- override def put(records: util.Collection[SinkRecord]): Unit = {
- require(writer.nonEmpty, "Writer is not set!")
- val seq = records.asScala.toVector
- writer.foreach(_.write(seq))
-
- if (enableProgress) {
- progressCounter.update(seq)
- }
- }
-
- /**
- * Clean up writer
- */
- override def stop(): Unit = {
- logger.info("Stopping Elastic sink.")
- writer.foreach(w => w.close())
- progressCounter.empty()
- }
-
- override def flush(map: util.Map[TopicPartition, OffsetAndMetadata]): Unit =
- logger.info("Flushing Elastic Sink")
-
- override def version: String = manifest.version()
-}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriter.scala b/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriter.scala
deleted file mode 100644
index 4f4ab7559..000000000
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriter.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6
-
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfigConstants
-import io.lenses.streamreactor.connect.elastic6.config.ElasticSettings
-import com.sksamuel.elastic4s.http.ElasticNodeEndpoint
-
-import scala.util.Failure
-import scala.util.Success
-import scala.util.Try
-
-object ElasticWriter {
-
- /**
- * Construct a JSONWriter.
- *
- * @param config An elasticSinkConfig to extract settings from.
- * @return An ElasticJsonWriter to write records from Kafka to ElasticSearch.
- */
- def apply(config: ElasticConfig): ElasticJsonWriter = {
-
- val hostNames = config.getString(ElasticConfigConstants.HOSTS).split(",")
- val protocol = config.getString(ElasticConfigConstants.PROTOCOL)
- val port = config.getInt(ElasticConfigConstants.ES_PORT)
- val prefix = Try(config.getString(ElasticConfigConstants.ES_PREFIX)) match {
- case Success("") => None
- case Success(configString) => Some(configString)
- case Failure(_) => None
- }
-
- val settings = ElasticSettings(config)
-
- new ElasticJsonWriter(
- KElasticClient.createHttpClient(settings, endpoints(hostNames, protocol, port, prefix).toIndexedSeq),
- settings,
- )
- }
-
- private def endpoints(hostNames: Array[String], protocol: String, port: Integer, prefix: Option[String]) =
- hostNames
- .map(hostname => ElasticNodeEndpoint(protocol, hostname, port, prefix))
-}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/KElasticClient.scala b/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/KElasticClient.scala
deleted file mode 100644
index 881b46ebb..000000000
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/KElasticClient.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6
-
-import io.lenses.kcql.Kcql
-import io.lenses.streamreactor.connect.elastic6.config.ElasticSettings
-import io.lenses.streamreactor.connect.elastic6.indexname.CreateIndex.getIndexName
-import com.sksamuel.elastic4s.bulk.BulkRequest
-import com.sksamuel.elastic4s.http.bulk.BulkResponse
-import com.sksamuel.elastic4s.http._
-import com.sksamuel.elastic4s.mappings.MappingDefinition
-import com.typesafe.scalalogging.StrictLogging
-import org.apache.http.auth.AuthScope
-import org.apache.http.auth.UsernamePasswordCredentials
-import org.apache.http.impl.client.BasicCredentialsProvider
-import org.apache.http.impl.nio.client.HttpAsyncClientBuilder
-import org.elasticsearch.client.RestClientBuilder.HttpClientConfigCallback
-
-import scala.concurrent.Future
-
-trait KElasticClient extends AutoCloseable {
- def index(kcql: Kcql): Unit
-
- def execute(definition: BulkRequest): Future[Any]
-}
-
-object KElasticClient extends StrictLogging {
-
- def createHttpClient(settings: ElasticSettings, endpoints: Seq[ElasticNodeEndpoint]): KElasticClient =
- if (settings.httpBasicAuthUsername.nonEmpty && settings.httpBasicAuthPassword.nonEmpty) {
- lazy val provider = {
- val provider = new BasicCredentialsProvider
- val credentials =
- new UsernamePasswordCredentials(settings.httpBasicAuthUsername, settings.httpBasicAuthPassword)
- provider.setCredentials(AuthScope.ANY, credentials)
- provider
- }
- val callback = new HttpClientConfigCallback {
- override def customizeHttpClient(httpClientBuilder: HttpAsyncClientBuilder): HttpAsyncClientBuilder =
- httpClientBuilder.setDefaultCredentialsProvider(provider)
- }
- val client: ElasticClient = ElasticClient(
- ElasticProperties(endpoints),
- requestConfigCallback = NoOpRequestConfigCallback,
- httpClientConfigCallback = callback,
- )
- new HttpKElasticClient(client)
- } else {
- val client: ElasticClient = ElasticClient(ElasticProperties(endpoints))
- new HttpKElasticClient(client)
- }
-}
-
-class HttpKElasticClient(client: ElasticClient) extends KElasticClient {
-
- import com.sksamuel.elastic4s.http.ElasticDsl._
-
- override def index(kcql: Kcql): Unit = {
- require(kcql.isAutoCreate, s"Auto-creating indexes hasn't been enabled for target:${kcql.getTarget}")
-
- val indexName = getIndexName(kcql)
- client.execute {
- Option(kcql.getDocType) match {
- case None => createIndex(indexName)
- case Some(documentType) => createIndex(indexName).mappings(MappingDefinition(documentType))
- }
- }
- ()
- }
-
- override def execute(definition: BulkRequest): Future[Response[BulkResponse]] = client.execute(definition)
-
- override def close(): Unit = client.close()
-}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/TransformAndExtractPK.scala b/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/TransformAndExtractPK.scala
deleted file mode 100644
index b131dc695..000000000
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/TransformAndExtractPK.scala
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6
-
-import java.nio.ByteBuffer
-
-import io.lenses.streamreactor.connect.json.SimpleJsonConverter
-import com.fasterxml.jackson.databind.JsonNode
-import io.lenses.connect.sql.StructSql._
-import io.lenses.json.sql.JacksonJson
-import io.lenses.json.sql.JsonSql._
-import io.lenses.sql.Field
-import com.typesafe.scalalogging.StrictLogging
-import org.apache.kafka.connect.data.Schema
-import org.apache.kafka.connect.data.Struct
-
-import scala.util.Failure
-import scala.util.Success
-import scala.util.Try
-
-private object TransformAndExtractPK extends StrictLogging {
- lazy val simpleJsonConverter = new SimpleJsonConverter()
-
- def apply(
- fields: Seq[Field],
- primaryKeysPaths: Seq[Vector[String]],
- schema: Schema,
- value: Any,
- withStructure: Boolean,
- ): (JsonNode, Seq[Any]) = {
- def raiseException(msg: String, t: Throwable) = throw new IllegalArgumentException(msg, t)
-
- if (value == null) {
- if (schema == null || !schema.isOptional) {
- raiseException("Null value is not allowed.", null)
- } else null
- } else {
- if (schema != null) {
- schema.`type`() match {
- case Schema.Type.BYTES =>
- //we expected to be json
- val array = value match {
- case a: Array[Byte] => a
- case b: ByteBuffer => b.array()
- case other => raiseException(s"Invalid payload:$other for schema Schema.BYTES.", null)
- }
-
- Try(JacksonJson.mapper.readTree(array)) match {
- case Failure(e) => raiseException("Invalid json.", e)
- case Success(json) =>
- Try(json.sql(fields, !withStructure)) match {
- case Failure(e) => raiseException(s"A KCQL exception occurred. ${e.getMessage}", e)
- case Success(jn) =>
- (jn, primaryKeysPaths.map(PrimaryKeyExtractor.extract(json, _)))
- }
- }
-
- case Schema.Type.STRING =>
- //we expected to be json
- Try(JacksonJson.asJson(value.asInstanceOf[String])) match {
- case Failure(e) => raiseException("Invalid json", e)
- case Success(json) =>
- Try(json.sql(fields, !withStructure)) match {
- case Success(jn) => (jn, primaryKeysPaths.map(PrimaryKeyExtractor.extract(json, _)))
- case Failure(e) => raiseException(s"A KCQL exception occurred.${e.getMessage}", e)
- }
- }
-
- case Schema.Type.STRUCT =>
- val struct = value.asInstanceOf[Struct]
- Try(struct.sql(fields, !withStructure)) match {
- case Success(s) =>
- (simpleJsonConverter.fromConnectData(s.schema(), s),
- primaryKeysPaths.map(PrimaryKeyExtractor.extract(struct, _)),
- )
-
- case Failure(e) => raiseException(s"A KCQL error occurred.${e.getMessage}", e)
- }
-
- case other => raiseException(s"Can't transform Schema type:$other.", null)
- }
- } else {
- //we can handle java.util.Map (this is what JsonConverter can spit out)
- value match {
- case m: java.util.Map[_, _] =>
- val map = m.asInstanceOf[java.util.Map[String, Any]]
- val jsonNode: JsonNode = JacksonJson.mapper.valueToTree[JsonNode](map)
- Try(jsonNode.sql(fields, !withStructure)) match {
- case Success(j) => (j, primaryKeysPaths.map(PrimaryKeyExtractor.extract(jsonNode, _)))
- case Failure(e) => raiseException(s"A KCQL exception occurred.${e.getMessage}", e)
- }
- case s: String =>
- Try(JacksonJson.asJson(s)) match {
- case Failure(e) => raiseException("Invalid json.", e)
- case Success(json) =>
- Try(json.sql(fields, !withStructure)) match {
- case Success(jn) => (jn, primaryKeysPaths.map(PrimaryKeyExtractor.extract(json, _)))
- case Failure(e) => raiseException(s"A KCQL exception occurred.${e.getMessage}", e)
- }
- }
-
- case b: Array[Byte] =>
- Try(JacksonJson.mapper.readTree(b)) match {
- case Failure(e) => raiseException("Invalid json.", e)
- case Success(json) =>
- Try(json.sql(fields, !withStructure)) match {
- case Failure(e) => raiseException(s"A KCQL exception occurred. ${e.getMessage}", e)
- case Success(jn) => (jn, primaryKeysPaths.map(PrimaryKeyExtractor.extract(json, _)))
- }
- }
- //we take it as String
- case other => raiseException(s"Value:$other is not handled!", null)
- }
- }
- }
- }
-}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/config/ElasticConfig.scala b/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/config/ElasticConfig.scala
deleted file mode 100644
index b44b10a5e..000000000
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/config/ElasticConfig.scala
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6.config
-
-import io.lenses.kcql.Kcql
-import io.lenses.streamreactor.common.config.base.traits.BaseConfig
-import io.lenses.streamreactor.common.config.base.traits.ErrorPolicySettings
-import io.lenses.streamreactor.common.config.base.traits.NumberRetriesSettings
-import io.lenses.streamreactor.common.config.base.traits.WriteTimeoutSettings
-import org.apache.kafka.common.config.ConfigDef
-import org.apache.kafka.common.config.ConfigDef.Importance
-import org.apache.kafka.common.config.ConfigDef.Type
-
-object ElasticConfig {
-
- val config: ConfigDef = new ConfigDef()
- .define(
- ElasticConfigConstants.PROTOCOL,
- Type.STRING,
- ElasticConfigConstants.PROTOCOL_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.PROTOCOL_DOC,
- "Connection",
- 1,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.PROTOCOL,
- )
- .define(
- ElasticConfigConstants.HOSTS,
- Type.STRING,
- ElasticConfigConstants.HOSTS_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.HOSTS_DOC,
- "Connection",
- 2,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.HOSTS,
- )
- .define(
- ElasticConfigConstants.ES_PORT,
- Type.INT,
- ElasticConfigConstants.ES_PORT_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ES_PORT_DOC,
- "Connection",
- 3,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.HOSTS,
- )
- .define(
- ElasticConfigConstants.ES_PREFIX,
- Type.STRING,
- ElasticConfigConstants.ES_PREFIX_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ES_PREFIX_DOC,
- "Connection",
- 4,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.HOSTS,
- )
- .define(
- ElasticConfigConstants.ES_CLUSTER_NAME,
- Type.STRING,
- ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ES_CLUSTER_NAME_DOC,
- "Connection",
- 5,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.ES_CLUSTER_NAME,
- )
- .define(
- ElasticConfigConstants.WRITE_TIMEOUT_CONFIG,
- Type.INT,
- ElasticConfigConstants.WRITE_TIMEOUT_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.WRITE_TIMEOUT_DOC,
- "Connection",
- 6,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.WRITE_TIMEOUT_DISPLAY,
- )
- .define(
- ElasticConfigConstants.BATCH_SIZE_CONFIG,
- Type.INT,
- ElasticConfigConstants.BATCH_SIZE_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.BATCH_SIZE_DOC,
- "Connection",
- 7,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.BATCH_SIZE_DISPLAY,
- )
- .define(
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME,
- Type.STRING,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC,
- "Connection",
- 8,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME,
- )
- .define(
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD,
- Type.STRING,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC,
- "Connection",
- 9,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD,
- )
- .define(
- ElasticConfigConstants.ERROR_POLICY_CONFIG,
- Type.STRING,
- ElasticConfigConstants.ERROR_POLICY_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ERROR_POLICY_DOC,
- "Error",
- 1,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.ERROR_POLICY_CONFIG,
- )
- .define(
- ElasticConfigConstants.NBR_OF_RETRIES_CONFIG,
- Type.INT,
- ElasticConfigConstants.NBR_OF_RETIRES_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.NBR_OF_RETRIES_DOC,
- "Error",
- 2,
- ConfigDef.Width.SHORT,
- ElasticConfigConstants.NBR_OF_RETRIES_CONFIG,
- )
- .define(
- ElasticConfigConstants.ERROR_RETRY_INTERVAL,
- Type.INT,
- ElasticConfigConstants.ERROR_RETRY_INTERVAL_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.ERROR_RETRY_INTERVAL_DOC,
- "Error",
- 3,
- ConfigDef.Width.LONG,
- ElasticConfigConstants.ERROR_RETRY_INTERVAL,
- )
- .define(
- ElasticConfigConstants.KCQL,
- Type.STRING,
- Importance.HIGH,
- ElasticConfigConstants.KCQL_DOC,
- "KCQL",
- 1,
- ConfigDef.Width.LONG,
- ElasticConfigConstants.KCQL,
- )
- .define(
- ElasticConfigConstants.PK_JOINER_SEPARATOR,
- Type.STRING,
- ElasticConfigConstants.PK_JOINER_SEPARATOR_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.PK_JOINER_SEPARATOR_DOC,
- "KCQL",
- 2,
- ConfigDef.Width.SHORT,
- ElasticConfigConstants.PK_JOINER_SEPARATOR,
- )
- .define(
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED,
- Type.BOOLEAN,
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED_DOC,
- "Metrics",
- 1,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED_DISPLAY,
- )
-}
-
-/**
- * ElasticSinkConfig
- *
- * Holds config, extends AbstractConfig.
- */
-case class ElasticConfig(props: Map[String, String])
- extends BaseConfig(ElasticConfigConstants.CONNECTOR_PREFIX, ElasticConfig.config, props)
- with WriteTimeoutSettings
- with ErrorPolicySettings
- with NumberRetriesSettings {
- val kcqlConstant: String = ElasticConfigConstants.KCQL
-
- def getKcql(): Seq[Kcql] =
- getString(kcqlConstant).split(";").filter(_.trim.nonEmpty).map(Kcql.parse).toIndexedSeq
-}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/config/ElasticConfigConstants.scala b/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/config/ElasticConfigConstants.scala
deleted file mode 100644
index 915b1c225..000000000
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/config/ElasticConfigConstants.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6.config
-
-import io.lenses.streamreactor.common.config.base.const.TraitConfigConst._
-
-object ElasticConfigConstants {
-
- val CONNECTOR_PREFIX = "connect.elastic"
-
- val PROTOCOL = s"${CONNECTOR_PREFIX}.protocol"
- val PROTOCOL_DOC = "URL protocol (http, https)"
- val PROTOCOL_DEFAULT = "http"
-
- val HOSTS = s"${CONNECTOR_PREFIX}.${CONNECTION_HOSTS_SUFFIX}"
- val HOSTS_DOC = "List of hostnames for the Elasticsearch cluster nodes, not including protocol or port."
- val HOSTS_DEFAULT = "localhost"
-
- val ES_PORT = s"${CONNECTOR_PREFIX}.${CONNECTION_PORT_SUFFIX}"
- val ES_PORT_DOC = "Port on which the Elasticsearch node listens"
- val ES_PORT_DEFAULT = 9200
-
- val ES_PREFIX = s"${CONNECTOR_PREFIX}.tableprefix"
- val ES_PREFIX_DOC = "Table prefix (optional)"
- val ES_PREFIX_DEFAULT = ""
-
- val ES_CLUSTER_NAME = s"${CONNECTOR_PREFIX}.${CLUSTER_NAME_SUFFIX}"
- val ES_CLUSTER_NAME_DOC = "Name of the Elasticsearch cluster; used in local mode to set up the connection"
- val ES_CLUSTER_NAME_DEFAULT = "elasticsearch"
-
- val KCQL = s"${CONNECTOR_PREFIX}.${KCQL_PROP_SUFFIX}"
- val KCQL_DOC = "KCQL expression describing field selection and routes."
-
- val WRITE_TIMEOUT_CONFIG = s"${CONNECTOR_PREFIX}.${WRITE_TIMEOUT_SUFFIX}"
- val WRITE_TIMEOUT_DOC = "The time to wait, in milliseconds. Default is 5 minutes."
- val WRITE_TIMEOUT_DISPLAY = "Write timeout"
- val WRITE_TIMEOUT_DEFAULT = 300000
-
- val CLIENT_HTTP_BASIC_AUTH_USERNAME = s"$CONNECTOR_PREFIX.use.http.username"
- val CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT = ""
- val CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC = "Username to use if HTTP Basic Auth is required; default is empty (auth disabled)."
- val CLIENT_HTTP_BASIC_AUTH_PASSWORD = s"$CONNECTOR_PREFIX.use.http.password"
- val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT = ""
- val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC = "Password to use if HTTP Basic Auth is required; default is empty (auth disabled)."
-
- val NBR_OF_RETRIES_CONFIG = s"${CONNECTOR_PREFIX}.${MAX_RETRIES_PROP_SUFFIX}"
- val NBR_OF_RETRIES_DOC = "The maximum number of times to try the write again."
- val NBR_OF_RETIRES_DEFAULT = 20
-
- val ERROR_POLICY_CONFIG = s"${CONNECTOR_PREFIX}.${ERROR_POLICY_PROP_SUFFIX}"
- val ERROR_POLICY_DOC: String =
- """Specifies the action to be taken if an error occurs while inserting the data
- |There are two available options:
- |NOOP - the error is swallowed
- |THROW - the error is allowed to propagate.
- |RETRY - The exception causes the Connect framework to retry the message. The number of retries is based on
- |The error will be logged automatically""".stripMargin
- val ERROR_POLICY_DEFAULT = "THROW"
-
- val BATCH_SIZE_CONFIG = s"$CONNECTOR_PREFIX.$BATCH_SIZE_PROP_SUFFIX"
- val BATCH_SIZE_DOC =
- "How many records to process at one time. As records are pulled from Kafka it can be 100k+ which will not be feasible to throw at Elastic search at once"
- val BATCH_SIZE_DISPLAY = "Batch size"
- val BATCH_SIZE_DEFAULT = 4000
-
- val ERROR_RETRY_INTERVAL = s"${CONNECTOR_PREFIX}.${RETRY_INTERVAL_PROP_SUFFIX}"
- val ERROR_RETRY_INTERVAL_DOC = "The time in milliseconds between retries."
- val ERROR_RETRY_INTERVAL_DEFAULT = "60000"
-
- /*
- val INDEX_NAME_SUFFIX = s"${CONNECTOR_PREFIX}.index.suffix"
- val INDEX_NAME_SUFFIX_DOC = "Suffix to append to the index name. Supports date time notation inside curly brackets. E.g. 'abc_{YYYY-MM-dd}_def'"
- val INDEX_NAME_SUFFIX_DEFAULT: String = null
-
- val AUTO_CREATE_INDEX = s"${CONNECTOR_PREFIX}.index.auto.create"
- val AUTO_CREATE_INDEX_DOC = "The flag enables/disables auto creating the ElasticSearch index. Boolean value required. Defaults to TRUE."
- val AUTO_CREATE_INDEX_DEFAULT = true
-
- val DOCUMENT_TYPE = s"${CONNECTOR_PREFIX}.document.type"
- val DOCUMENT_TYPE_DOC = "Sets the ElasticSearch document type. See https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-type-field.html for more info."
- val DOCUMENT_TYPE_DEFAULT: String = null
- */
-
- val PROGRESS_COUNTER_ENABLED = PROGRESS_ENABLED_CONST
- val PROGRESS_COUNTER_ENABLED_DOC = "Enables logging of how many records have been processed"
- val PROGRESS_COUNTER_ENABLED_DEFAULT = false
- val PROGRESS_COUNTER_ENABLED_DISPLAY = "Enable progress counter"
-
- val PK_JOINER_SEPARATOR = s"$CONNECTOR_PREFIX.pk.separator"
- val PK_JOINER_SEPARATOR_DOC = "Separator used when there is more than one field in the PK"
- val PK_JOINER_SEPARATOR_DEFAULT = "-"
-}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/config/ElasticSettings.scala b/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/config/ElasticSettings.scala
deleted file mode 100644
index 9d3b86713..000000000
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/config/ElasticSettings.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6.config
-
-import io.lenses.kcql.Kcql
-import io.lenses.streamreactor.common.errors.ErrorPolicy
-
-/**
- * Created by andrew@datamountaineer.com on 13/05/16.
- * stream-reactor-maven
- */
-case class ElasticSettings(
- kcqls: Seq[Kcql],
- errorPolicy: ErrorPolicy,
- taskRetries: Int = ElasticConfigConstants.NBR_OF_RETIRES_DEFAULT,
- writeTimeout: Int = ElasticConfigConstants.WRITE_TIMEOUT_DEFAULT,
- batchSize: Int = ElasticConfigConstants.BATCH_SIZE_DEFAULT,
- pkJoinerSeparator: String = ElasticConfigConstants.PK_JOINER_SEPARATOR_DEFAULT,
- httpBasicAuthUsername: String = ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
- httpBasicAuthPassword: String = ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT,
-)
-
-object ElasticSettings {
-
- def apply(config: ElasticConfig): ElasticSettings = {
- val kcql = config.getKcql()
- val pkJoinerSeparator = config.getString(ElasticConfigConstants.PK_JOINER_SEPARATOR)
- val writeTimeout = config.getWriteTimeout
- val errorPolicy = config.getErrorPolicy
- val retries = config.getNumberRetries
- val httpBasicAuthUsername = config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME)
- val httpBasicAuthPassword = config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD)
-
- val batchSize = config.getInt(ElasticConfigConstants.BATCH_SIZE_CONFIG)
-
- ElasticSettings(kcql,
- errorPolicy,
- retries,
- writeTimeout,
- batchSize,
- pkJoinerSeparator,
- httpBasicAuthUsername,
- httpBasicAuthPassword,
- )
- }
-}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/CustomIndexName.scala b/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/CustomIndexName.scala
deleted file mode 100644
index 4f82ea90e..000000000
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/CustomIndexName.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6.indexname
-
-import scala.annotation.tailrec
-
-class InvalidCustomIndexNameException(message: String) extends RuntimeException(message)
-
-case class CustomIndexName(fragments: Vector[IndexNameFragment]) {
- override def toString: String = fragments.map(_.getFragment).mkString
-}
-
-object CustomIndexName {
-
- @tailrec
- private def parseIndexName(
- remainingChars: Vector[Char],
- currentFragment: StringBuilder,
- results: Vector[Option[IndexNameFragment]],
- ): Vector[IndexNameFragment] =
- remainingChars match {
- case head +: rest => head match {
- case DateTimeFragment.OpeningChar =>
- val (dateTimeFormat, afterDateTimeFormatIncludingClosingChar) = rest.span {
- _ != DateTimeFragment.ClosingChar
- }
- val afterDateTimeFormat = afterDateTimeFormatIncludingClosingChar.tail
-
- val maybeCurrentFragment = currentFragment.mkString.toOption
- val maybeDateTimeFormat = dateTimeFormat.mkString.toOption
-
- val newResultsWithDateTimeFragment =
- results :+ maybeCurrentFragment.map(TextFragment.apply) :+ maybeDateTimeFormat.map(DateTimeFragment(_))
-
- parseIndexName(afterDateTimeFormat, new StringBuilder, newResultsWithDateTimeFragment)
- case DateTimeFragment.ClosingChar =>
- throw new InvalidCustomIndexNameException(
- s"Found closing '${DateTimeFragment.ClosingChar}' but no opening character",
- )
- case anyOtherChar => parseIndexName(rest, currentFragment.append(anyOtherChar), results)
- }
- case Vector() =>
- val maybeCurrentFragment = currentFragment.mkString.toOption
- (results :+ maybeCurrentFragment.map(TextFragment.apply)).flatten
- case other => throw new IllegalStateException(s"Invalid match for $other")
- }
-
- def parseIndexName(indexName: String): CustomIndexName =
- CustomIndexName(parseIndexName(indexName.toVector, new StringBuilder, Vector.empty))
-}
diff --git a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticConfigTest.scala b/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticConfigTest.scala
deleted file mode 100644
index 50e6c5ef9..000000000
--- a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticConfigTest.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6
-
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfigConstants
-
-class ElasticConfigTest extends TestBase {
- "A ElasticConfig should return the client mode and hostnames" in {
- val config = new ElasticConfig(getElasticSinkConfigProps())
- config.getString(ElasticConfigConstants.HOSTS) shouldBe ELASTIC_SEARCH_HOSTNAMES
- config.getString(ElasticConfigConstants.ES_CLUSTER_NAME) shouldBe ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT
- config.getString(ElasticConfigConstants.KCQL) shouldBe QUERY
- }
-
- "A ElasticConfig should return the http basic auth username and password when set" in {
- val config = new ElasticConfig(getElasticSinkConfigPropsHTTPClient(auth = true))
- config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME) shouldBe BASIC_AUTH_USERNAME
- config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD) shouldBe BASIC_AUTH_PASSWORD
- }
-}
diff --git a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkConnectorTest.scala b/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkConnectorTest.scala
deleted file mode 100644
index c58fd9329..000000000
--- a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkConnectorTest.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6
-
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfigConstants
-
-import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsJava
-
-class ElasticSinkConnectorTest extends TestBase {
- "Should start a Elastic Search Connector" in {
- //get config
- val config = getElasticSinkConfigProps()
- //get connector
- val connector = new ElasticSinkConnector()
- //start with config
- connector.start(config.asJava)
- //check config
- val taskConfigs = connector.taskConfigs(10)
- taskConfigs.asScala.head.get(ElasticConfigConstants.HOSTS) shouldBe ELASTIC_SEARCH_HOSTNAMES
- taskConfigs.size() shouldBe 10
- //check connector
- connector.taskClass() shouldBe classOf[ElasticSinkTask]
- connector.stop()
- }
-}
diff --git a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkTaskTest.scala b/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkTaskTest.scala
deleted file mode 100644
index bf1c5c69f..000000000
--- a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticSinkTaskTest.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6
-
-import org.apache.kafka.connect.sink.SinkTaskContext
-import org.mockito.MockitoSugar
-
-import scala.jdk.CollectionConverters.MapHasAsJava
-
-class ElasticSinkTaskTest extends TestBase with MockitoSugar {
- "A ElasticSinkTask should start and write to Elastic Search" in {
- //mock the context to return our assignment when called
- val context = mock[SinkTaskContext]
- when(context.assignment()).thenReturn(getAssignment)
- //get config
- val config = getElasticSinkConfigProps()
- //get task
- val task = new ElasticSinkTask()
- //initialise the tasks context
- task.initialize(context)
- //check version
- task.version() shouldBe ""
- //start task
- task.start(config.asJava)
- //simulate the call from Connect
- //task.put(testRecords.asJava)
- //stop task
- task.stop()
- }
-}
diff --git a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriterCredentialsTest.scala b/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriterCredentialsTest.scala
deleted file mode 100644
index 7768f9a16..000000000
--- a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/ElasticWriterCredentialsTest.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6
-
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic6.config.ElasticSettings
-
-class ElasticWriterCredentialsTest extends TestBase {
-
- "A writer should be using HTTP is set with HTTP Basic Auth Credentials" in {
- val config = new ElasticConfig(getElasticSinkConfigPropsHTTPClient(auth = true))
- val settings = ElasticSettings(config)
- settings.httpBasicAuthUsername shouldBe BASIC_AUTH_USERNAME
- settings.httpBasicAuthPassword shouldBe BASIC_AUTH_PASSWORD
- }
-}
diff --git a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/indexname/IndexNameFragmentTest.scala b/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/indexname/IndexNameFragmentTest.scala
deleted file mode 100644
index 60d24f27d..000000000
--- a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/indexname/IndexNameFragmentTest.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic6.indexname
-
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should.Matchers
-
-class IndexNameFragmentTest extends AnyFlatSpec with Matchers {
-
-// "TextFragment" should "return the original text when using getFragment()" in {
-// forAll(Gen.alphaStr) { someString =>
-// TextFragment(someString).getFragment shouldBe someString
-// }
-// }
-
- "DateTimeFragment" should "return the formatted date when using getFragment()" in new ClockFixture {
- val dateTimeFormat = "YYYY-MM-dd HH:mm:ss"
- val expectedResult = "2016-10-02 14:00:00"
- DateTimeFragment(dateTimeFormat, TestClock).getFragment shouldBe expectedResult
- }
-}
diff --git a/kafka-connect-elastic7/src/fun/resources/logback.xml b/kafka-connect-elastic7/src/fun/resources/logback.xml
deleted file mode 100644
index c1725fb8d..000000000
--- a/kafka-connect-elastic7/src/fun/resources/logback.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<configuration>
-
-    <appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
-        <target>System.out</target>
-        <encoder>
-            <pattern>%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n</pattern>
-        </encoder>
-    </appender>
-
-    <root level="INFO">
-        <appender-ref ref="stdout"/>
-    </root>
-
-</configuration>
\ No newline at end of file
diff --git a/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/CreateLocalNodeClientUtil.scala b/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/CreateLocalNodeClientUtil.scala
deleted file mode 100644
index 1448afcae..000000000
--- a/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/CreateLocalNodeClientUtil.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package io.lenses.streamreactor.connect.elastic7
-
-import com.sksamuel.elastic4s.http.JavaClient
-import com.sksamuel.elastic4s.ElasticClient
-import com.sksamuel.elastic4s.ElasticProperties
-import org.testcontainers.elasticsearch.ElasticsearchContainer
-
-object CreateLocalNodeClientUtil {
-
- private val url = "docker.elastic.co/elasticsearch/elasticsearch:7.2.0"
-
- def createLocalNode() = {
- val container = new ElasticsearchContainer(url)
- //container.withReuse(true)
- container.start()
- container
- }
-
- def createLocalNodeClient(localNode: ElasticsearchContainer) = {
- val esProps = ElasticProperties(s"http://${localNode.getHttpHostAddress}")
- ElasticClient(JavaClient(esProps))
- }
-}
diff --git a/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriterSelectionTest.scala b/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriterSelectionTest.scala
deleted file mode 100644
index d913dc929..000000000
--- a/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriterSelectionTest.scala
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Copyright 2017 Datamountaineer.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.lenses.streamreactor.connect.elastic7
-
-import io.lenses.streamreactor.connect.elastic7.CreateLocalNodeClientUtil.createLocalNode
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic7.config.ElasticSettings
-import com.sksamuel.elastic4s.ElasticClient
-import com.sksamuel.elastic4s.ElasticDsl._
-import org.apache.kafka.connect.sink.SinkTaskContext
-import org.mockito.MockitoSugar
-
-import java.util.UUID
-import scala.reflect.io.File
-
-class ElasticWriterSelectionTest extends ITBase with MockitoSugar {
- "A ElasticWriter should insert into Elastic Search a number of records" in {
-
- val TMP = File(System.getProperty("java.io.tmpdir") + "/elastic-" + UUID.randomUUID())
- TMP.createDirectory()
- //mock the context to return our assignment when called
- val context = mock[SinkTaskContext]
- when(context.assignment()).thenReturn(getAssignment)
- //get test records
- val testRecords = getTestRecords()
- //get config
- val config = new ElasticConfig(getElasticSinkConfigPropsSelection())
-
- val localNode = createLocalNode()
- val client: ElasticClient = CreateLocalNodeClientUtil.createLocalNodeClient(localNode)
- //get writer
-
- val settings = ElasticSettings(config)
- val writer = new ElasticJsonWriter(new HttpKElasticClient(client), settings)
- //write records to elastic
- writer.write(testRecords)
-
- Thread.sleep(2000)
- //check counts
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe testRecords.size
- //close writer
- writer.close()
- client.close()
- localNode.close()
- TMP.deleteRecursively()
- }
-
- "A ElasticWriter should insert into Elastic Search a number of records when nested fields are selected" in {
- val TMP = File(System.getProperty("java.io.tmpdir") + "/elastic-" + UUID.randomUUID())
- TMP.createDirectory()
- //mock the context to return our assignment when called
- val context = mock[SinkTaskContext]
- when(context.assignment()).thenReturn(getAssignment)
- //get test records
- val testRecords = getTestRecordsNested
- //get config
- val config =
- new ElasticConfig(getBaseElasticSinkConfigProps(s"INSERT INTO $INDEX SELECT id, nested.string_field FROM $TOPIC"))
-
- val localNode = createLocalNode()
- val client: ElasticClient = CreateLocalNodeClientUtil.createLocalNodeClient(localNode)
- //get writer
-
- val settings = ElasticSettings(config)
- val writer = new ElasticJsonWriter(new HttpKElasticClient(client), settings)
- //write records to elastic
- writer.write(testRecords)
-
- Thread.sleep(2000)
- //check counts
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe testRecords.size
- //close writer
- writer.close()
- client.close()
- localNode.close()
- TMP.deleteRecursively()
- }
-
- "A ElasticWriter should update records in Elastic Search" in {
- val TMP = File(System.getProperty("java.io.tmpdir") + "/elastic-" + UUID.randomUUID())
- TMP.createDirectory()
- //mock the context to return our assignment when called
- val context = mock[SinkTaskContext]
- when(context.assignment()).thenReturn(getAssignment)
- //get test records
- val testRecords = getTestRecords()
- //get config
- val config = new ElasticConfig(getElasticSinkUpdateConfigPropsSelection())
-
- val localNode = createLocalNode()
- val client: ElasticClient = CreateLocalNodeClientUtil.createLocalNodeClient(localNode)
- val settings = ElasticSettings(config)
- val writer = new ElasticJsonWriter(new HttpKElasticClient(client), settings)
- //First run writes records to elastic
- writer.write(testRecords)
-
- Thread.sleep(2000)
- //check counts
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe testRecords.size
-
- val testUpdateRecords = getUpdateTestRecord
-
- //Second run just updates
- writer.write(testUpdateRecords)
-
- Thread.sleep(2000)
- //check counts
- val updateRes = client.execute {
- search(INDEX)
- }.await
- updateRes.result.totalHits shouldBe testRecords.size
-
- //close writer
- writer.close()
- client.close()
- localNode.close()
- TMP.deleteRecursively()
- }
-
- "A ElasticWriter should update records in Elastic Search with PK nested field" in {
- val TMP = File(System.getProperty("java.io.tmpdir") + "/elastic-" + UUID.randomUUID())
- TMP.createDirectory()
- //mock the context to return our assignment when called
- val context = mock[SinkTaskContext]
- when(context.assignment()).thenReturn(getAssignment)
- //get test records
- val testRecords = getTestRecordsNested
- //get config
- val config = new ElasticConfig(
- getBaseElasticSinkConfigProps(s"UPSERT INTO $INDEX SELECT nested.id, string_field FROM $TOPIC PK nested.id"),
- )
-
- val localNode = createLocalNode()
- val client: ElasticClient = CreateLocalNodeClientUtil.createLocalNodeClient(localNode)
- val settings = ElasticSettings(config)
- val writer = new ElasticJsonWriter(new HttpKElasticClient(client), settings)
- //First run writes records to elastic
- writer.write(testRecords)
-
- Thread.sleep(2000)
- //check counts
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe testRecords.size
-
- val testUpdateRecords = getUpdateTestRecordNested
-
- //Second run just updates
- writer.write(testUpdateRecords)
-
- Thread.sleep(2000)
- //check counts
- val updateRes = client.execute {
- search(INDEX)
- }.await
- updateRes.result.totalHits shouldBe testRecords.size
-
- //close writer
- writer.close()
- client.close()
- localNode.close()
-
- TMP.deleteRecursively()
- }
-}
diff --git a/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriterTest.scala b/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriterTest.scala
deleted file mode 100644
index 8c5efc24e..000000000
--- a/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriterTest.scala
+++ /dev/null
@@ -1,235 +0,0 @@
-/*
- * Copyright 2017 Datamountaineer.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.lenses.streamreactor.connect.elastic7
-
-import io.lenses.streamreactor.connect.elastic7.CreateLocalNodeClientUtil._
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic7.config.ElasticSettings
-import com.sksamuel.elastic4s.ElasticClient
-import com.sksamuel.elastic4s.ElasticDsl._
-import org.elasticsearch.common.settings.Settings
-import org.mockito.MockitoSugar
-import org.scalatest.BeforeAndAfterEach
-import org.testcontainers.elasticsearch.ElasticsearchContainer
-
-import java.nio.file.Paths
-import java.util.UUID
-import scala.reflect.io.File
-
-class ElasticWriterTest extends ITBase with MockitoSugar with BeforeAndAfterEach {
-
- class TestContext {
-
- val TemporaryLocalNodeDir = createTmpDir()
- val RandomClusterName = UUID.randomUUID().toString()
- val TestRecords = getTestRecords()
-
- val DefaultSettings = Settings
- .builder()
- .put("cluster.name", RandomClusterName)
- .put("path.home", TemporaryLocalNodeDir.toString)
- .put("path.data", Paths.get(TemporaryLocalNodeDir.toString()).resolve("data").toString)
- .put("path.repo", Paths.get(TemporaryLocalNodeDir.toString()).resolve("repo").toString)
- .build()
-
- private def createTmpDir(): File = {
- val dirFile = File(System.getProperty("java.io.tmpdir") + "/elastic-" + UUID.randomUUID())
- dirFile.createDirectory()
- dirFile
- }
-
- // TODO: Ensure these Settings properties are used
- def writeTestRecords(props: Map[String, String]) = {
-
- val localNode = createLocalNode()
-
- val client: ElasticClient = createLocalNodeClient(localNode)
-
- val writer = new ElasticJsonWriter(new HttpKElasticClient(client), ElasticSettings(ElasticConfig(props)))
-
- writer.write(TestRecords)
- (localNode, client, writer)
- }
-
- }
-
- "A ElasticWriter should insert into Elastic Search a number of records" in new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigProps(RandomClusterName),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.stop()
- TemporaryLocalNodeDir.deleteRecursively()
-
- }
-
- "A ElasticWriter should update a number of records in Elastic Search" in new TestContext {
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkUpdateConfigProps(RandomClusterName),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- val testUpdateRecords = getUpdateTestRecord
-
- //Second run just updates
- writer.write(testUpdateRecords)
-
- Thread.sleep(2000)
-
- val updateRes = client.execute {
- search(INDEX)
- }.await
- updateRes.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.stop()
- TemporaryLocalNodeDir.deleteRecursively()
- }
-
- "A ElasticWriter should update a number of records in Elastic Search with index suffix defined" in new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigPropsWithDateSuffixAndIndexAutoCreation(autoCreate = true),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX_WITH_DATE)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.stop()
- TemporaryLocalNodeDir.deleteRecursively()
-
- }
-
- "It should fail writing to a non-existent index when auto creation is disabled" ignore new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigPropsWithDateSuffixAndIndexAutoCreation(autoCreate = false, RandomClusterName),
- )
-
- Thread.sleep(2000)
-
- val searchResponse = client.execute {
- search(INDEX_WITH_DATE)
- }.await
- searchResponse.isError should be(true)
- searchResponse.error.`type` should be("index_not_found_exception")
-
- writer.close()
- client.close()
- node.close()
- TemporaryLocalNodeDir.deleteRecursively()
-
- }
-
- "A ElasticWriter should insert into Elastic Search a number of records with the HTTP Client" in new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigPropsHTTPClient(),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.close()
- TemporaryLocalNodeDir.deleteRecursively()
- }
-
- "A ElasticWriter should insert into with PK Elastic Search a number of records" in new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigPropsPk(RandomClusterName),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- writer.write(TestRecords)
-
- Thread.sleep(2000)
-
- val resUpdate = client.execute {
- search(INDEX)
- }.await
- resUpdate.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.close()
- TemporaryLocalNodeDir.deleteRecursively()
- }
-
- "A ElasticWriter should insert into without PK Elastic Search a number of records" in new TestContext {
-
- val (node: ElasticsearchContainer, client: ElasticClient, writer: ElasticJsonWriter) = writeTestRecords(
- getElasticSinkConfigProps(RandomClusterName),
- )
-
- Thread.sleep(2000)
-
- val res = client.execute {
- search(INDEX)
- }.await
- res.result.totalHits shouldBe TestRecords.size
-
- writer.write(TestRecords)
-
- Thread.sleep(2000)
-
- val resUpdate = client.execute {
- search(INDEX)
- }.await
- resUpdate.result.totalHits shouldBe TestRecords.size
-
- writer.close()
- client.close()
- node.close()
- TemporaryLocalNodeDir.deleteRecursively()
- }
-}
diff --git a/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/ITBase.scala b/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/ITBase.scala
deleted file mode 100644
index 7852cc7a1..000000000
--- a/kafka-connect-elastic7/src/it/scala/io/lenses/streamreactor/connect/elastic7/ITBase.scala
+++ /dev/null
@@ -1,254 +0,0 @@
-/*
- * Copyright 2017 Datamountaineer.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.lenses.streamreactor.connect.elastic7
-
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfigConstants
-import org.apache.kafka.common.TopicPartition
-import org.apache.kafka.common.record.TimestampType
-import org.apache.kafka.connect.data.Schema
-import org.apache.kafka.connect.data.SchemaBuilder
-import org.apache.kafka.connect.data.Struct
-import org.apache.kafka.connect.sink.SinkRecord
-import org.scalatest.BeforeAndAfter
-import org.scalatest.matchers.should.Matchers
-import org.scalatest.wordspec.AnyWordSpec
-
-import java.time.LocalDateTime
-import java.time.format.DateTimeFormatter._
-import java.util
-import scala.collection.mutable
-import scala.jdk.CollectionConverters.SetHasAsScala
-
-trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
- val ELASTIC_SEARCH_HOSTNAMES = "localhost:9300"
- val BASIC_AUTH_USERNAME = "usertest"
- val BASIC_AUTH_PASSWORD = "userpassword"
- val TOPIC = "sink_test"
- val INDEX = "index_andrew"
- val INDEX_WITH_DATE = s"${INDEX}_${LocalDateTime.now.format(ofPattern("YYYY-MM-dd"))}"
- val QUERY = s"INSERT INTO $INDEX SELECT * FROM $TOPIC"
- val QUERY_PK = s"INSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
- val QUERY_SELECTION = s"INSERT INTO $INDEX SELECT id, string_field FROM $TOPIC"
- val UPDATE_QUERY = s"UPSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
- val UPDATE_QUERY_SELECTION = s"UPSERT INTO $INDEX SELECT id, string_field FROM $TOPIC PK id"
-
- protected val PARTITION: Int = 12
- protected val PARTITION2: Int = 13
- protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
- protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
- protected val ASSIGNMENT: util.Set[TopicPartition] = new util.HashSet[TopicPartition]
- //Set topic assignments
- ASSIGNMENT.add(TOPIC_PARTITION)
- ASSIGNMENT.add(TOPIC_PARTITION2)
-
- //get the assignment of topic partitions for the sinkTask
- def getAssignment: util.Set[TopicPartition] =
- ASSIGNMENT
-
- //build a test record schema
- def createSchema: Schema =
- SchemaBuilder.struct.name("record")
- .version(1)
- .field("id", Schema.STRING_SCHEMA)
- .field("int_field", Schema.INT32_SCHEMA)
- .field("long_field", Schema.INT64_SCHEMA)
- .field("string_field", Schema.STRING_SCHEMA)
- .build
-
- def createSchemaNested: Schema =
- SchemaBuilder.struct.name("record")
- .version(1)
- .field("id", Schema.STRING_SCHEMA)
- .field("int_field", Schema.INT32_SCHEMA)
- .field("long_field", Schema.INT64_SCHEMA)
- .field("string_field", Schema.STRING_SCHEMA)
- .field("nested", createSchema)
- .build
-
- def createRecordNested(id: String): Struct =
- new Struct(createSchemaNested)
- .put("id", id)
- .put("int_field", 11)
- .put("long_field", 11L)
- .put("string_field", "11")
- .put("nested",
- new Struct(createSchema)
- .put("id", id)
- .put("int_field", 21)
- .put("long_field", 21L)
- .put("string_field", "21"),
- )
-
- //build a test record
- def createRecord(schema: Schema, id: String): Struct =
- new Struct(schema)
- .put("id", id)
- .put("int_field", 12)
- .put("long_field", 12L)
- .put("string_field", "foo")
-
- //generate some test records
- def getTestRecords(): Vector[SinkRecord] = {
- val schema = createSchema
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
-
- assignment.flatMap { a =>
- (1 to 7).map { i =>
- val record: Struct = createRecord(schema, a.topic() + "-" + a.partition() + "-" + i)
- new SinkRecord(a.topic(),
- a.partition(),
- Schema.STRING_SCHEMA,
- "key",
- schema,
- record,
- i.toLong,
- System.currentTimeMillis(),
- TimestampType.CREATE_TIME,
- )
- }
- }.toVector
- }
-
- def getTestRecordsNested: Vector[SinkRecord] = {
- val schema = createSchemaNested
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
-
- assignment.flatMap { a =>
- (1 to 7).map { i =>
- val record: Struct = createRecordNested(a.topic() + "-" + a.partition() + "-" + i)
- new SinkRecord(a.topic(),
- a.partition(),
- Schema.STRING_SCHEMA,
- "key",
- schema,
- record,
- i.toLong,
- System.currentTimeMillis(),
- TimestampType.CREATE_TIME,
- )
- }
- }.toVector
- }
-
- def getUpdateTestRecord: Vector[SinkRecord] = {
- val schema = createSchema
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
-
- assignment.flatMap { a =>
- (1 to 2).map { i =>
- val record: Struct = createRecord(schema, a.topic() + "-" + a.partition() + "-" + i)
- new SinkRecord(a.topic(),
- a.partition(),
- Schema.STRING_SCHEMA,
- "key",
- schema,
- record,
- i.toLong,
- System.currentTimeMillis(),
- TimestampType.CREATE_TIME,
- )
- }
- }.toVector
- }
-
- def getUpdateTestRecordNested: Vector[SinkRecord] = {
- val schema = createSchemaNested
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
-
- assignment.flatMap { a =>
- (1 to 2).map { i =>
- val record: Struct = createRecordNested(a.topic() + "-" + a.partition() + "-" + i)
- new SinkRecord(a.topic(),
- a.partition(),
- Schema.STRING_SCHEMA,
- "key",
- schema,
- record,
- i.toLong,
- System.currentTimeMillis(),
- TimestampType.CREATE_TIME,
- )
- }
- }.toVector
- }
-
- def getElasticSinkConfigProps(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- getBaseElasticSinkConfigProps(QUERY, clusterName)
-
- def getElasticSinkConfigPropsSelection(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- getBaseElasticSinkConfigProps(QUERY_SELECTION, clusterName)
-
- def getElasticSinkConfigPropsPk(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- getBaseElasticSinkConfigProps(QUERY_PK, clusterName)
-
- def getElasticSinkUpdateConfigProps(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- getBaseElasticSinkConfigProps(UPDATE_QUERY, clusterName)
-
- def getElasticSinkUpdateConfigPropsSelection(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- getBaseElasticSinkConfigProps(UPDATE_QUERY_SELECTION, clusterName)
-
- def getBaseElasticSinkConfigProps(
- query: String,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- Map(
- "topics" -> TOPIC,
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> query,
- )
-
- def getElasticSinkConfigPropsWithDateSuffixAndIndexAutoCreation(
- autoCreate: Boolean,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- Map(
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> (QUERY + (if (autoCreate) " AUTOCREATE "
- else "") + " WITHINDEXSUFFIX=_{YYYY-MM-dd}"),
- )
-
- def getElasticSinkConfigPropsHTTPClient(
- auth: Boolean = false,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- Map(
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> QUERY,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME -> (if (auth) BASIC_AUTH_USERNAME
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT),
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD -> (if (auth) BASIC_AUTH_PASSWORD
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT),
- )
-}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriter.scala b/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriter.scala
deleted file mode 100644
index f8d5cd20f..000000000
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriter.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7
-
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfigConstants
-import io.lenses.streamreactor.connect.elastic7.config.ElasticSettings
-import com.sksamuel.elastic4s.ElasticNodeEndpoint
-
-import scala.util.Failure
-import scala.util.Success
-import scala.util.Try
-
-object ElasticWriter {
-
- /**
- * Construct an ElasticJsonWriter.
- *
- * @param config An ElasticConfig to extract settings from.
- * @return An ElasticJsonWriter to write records from Kafka to ElasticSearch.
- */
- def apply(config: ElasticConfig): ElasticJsonWriter = {
-
- val hostNames = config.getString(ElasticConfigConstants.HOSTS).split(",")
- val protocol = config.getString(ElasticConfigConstants.PROTOCOL)
- val port = config.getInt(ElasticConfigConstants.ES_PORT)
- val prefix = Try(config.getString(ElasticConfigConstants.ES_PREFIX)) match {
- case Success("") => None
- case Success(configString) => Some(configString)
- case Failure(_) => None
- }
-
- val settings = ElasticSettings(config)
-
- new ElasticJsonWriter(
- KElasticClient.createHttpClient(settings, endpoints(hostNames, protocol, port, prefix).toIndexedSeq),
- settings,
- )
- }
-
- private def endpoints(hostNames: Array[String], protocol: String, port: Integer, prefix: Option[String]) =
- hostNames
- .map(hostname => ElasticNodeEndpoint(protocol, hostname, port, prefix))
-}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/KElasticClient.scala b/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/KElasticClient.scala
deleted file mode 100644
index 323df66af..000000000
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/KElasticClient.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7
-
-import io.lenses.kcql.Kcql
-import io.lenses.streamreactor.connect.elastic7.config.ElasticSettings
-import io.lenses.streamreactor.connect.elastic7.indexname.CreateIndex.getIndexName
-import com.sksamuel.elastic4s.requests.bulk.BulkRequest
-import com.sksamuel.elastic4s.requests.bulk.BulkResponse
-import com.sksamuel.elastic4s.ElasticClient
-import com.sksamuel.elastic4s.ElasticNodeEndpoint
-import com.sksamuel.elastic4s.ElasticProperties
-import com.sksamuel.elastic4s.Response
-import com.sksamuel.elastic4s.http.JavaClient
-import com.typesafe.scalalogging.StrictLogging
-import org.apache.http.auth.AuthScope
-import org.apache.http.auth.UsernamePasswordCredentials
-import org.apache.http.client.config.RequestConfig.Builder
-import org.apache.http.impl.client.BasicCredentialsProvider
-import org.apache.http.impl.nio.client.HttpAsyncClientBuilder
-
-import scala.concurrent.Future
-
-trait KElasticClient extends AutoCloseable {
- def index(kcql: Kcql): Unit
-
- def execute(definition: BulkRequest): Future[Any]
-}
-
-object KElasticClient extends StrictLogging {
-
- def createHttpClient(settings: ElasticSettings, endpoints: Seq[ElasticNodeEndpoint]): KElasticClient =
- if (settings.httpBasicAuthUsername.nonEmpty && settings.httpBasicAuthPassword.nonEmpty) {
- lazy val provider = {
- val provider = new BasicCredentialsProvider
- val credentials =
- new UsernamePasswordCredentials(settings.httpBasicAuthUsername, settings.httpBasicAuthPassword)
- provider.setCredentials(AuthScope.ANY, credentials)
- provider
- }
-
- val javaClient = JavaClient(
- ElasticProperties(endpoints),
- (requestConfigBuilder: Builder) => requestConfigBuilder,
- (httpClientBuilder: HttpAsyncClientBuilder) => httpClientBuilder.setDefaultCredentialsProvider(provider),
- )
-
- val client: ElasticClient = ElasticClient(javaClient)
- new HttpKElasticClient(client)
- } else {
- val client: ElasticClient = ElasticClient(JavaClient(ElasticProperties(endpoints)))
- new HttpKElasticClient(client)
- }
-}
-
-class HttpKElasticClient(client: ElasticClient) extends KElasticClient {
-
- import com.sksamuel.elastic4s.ElasticDsl._
-
- override def index(kcql: Kcql): Unit = {
- require(kcql.isAutoCreate, s"Auto-creating indexes hasn't been enabled for target:${kcql.getTarget}")
-
- val indexName = getIndexName(kcql)
- client.execute {
- createIndex(indexName)
- }
- ()
- }
-
- override def execute(definition: BulkRequest): Future[Response[BulkResponse]] = client.execute(definition)
-
- override def close(): Unit = client.close()
-}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/PrimaryKeyExtractor.scala b/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/PrimaryKeyExtractor.scala
deleted file mode 100644
index 5b54ff38e..000000000
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/PrimaryKeyExtractor.scala
+++ /dev/null
@@ -1,238 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7
-
-import com.fasterxml.jackson.databind.JsonNode
-import com.fasterxml.jackson.databind.node._
-import org.apache.kafka.connect.data._
-import org.apache.kafka.connect.errors.ConnectException
-
-import scala.annotation.tailrec
-import scala.jdk.CollectionConverters.IteratorHasAsScala
-import scala.jdk.CollectionConverters.ListHasAsScala
-
-object PrimaryKeyExtractor {
- def extract(node: JsonNode, path: Vector[String]): Any = {
- @tailrec
- def innerExtract(n: JsonNode, p: Vector[String]): Any = {
- def checkValidPath(): Unit =
- if (p.nonEmpty) {
- throw new IllegalArgumentException(
- s"Invalid field selection for '${path.mkString(".")}'. It doesn't resolve to a primitive field",
- )
- }
-
- n match {
- case null => null
- case _: BinaryNode =>
- checkValidPath()
- n.binaryValue()
-
- case _: BooleanNode =>
- checkValidPath()
- n.booleanValue()
-
- case _: BigIntegerNode =>
- checkValidPath()
- n.bigIntegerValue()
- case _: DecimalNode =>
- checkValidPath()
- n.decimalValue()
- case _: DoubleNode =>
- checkValidPath()
- n.doubleValue()
- case _: FloatNode =>
- checkValidPath()
- n.floatValue()
- case _: IntNode =>
- checkValidPath()
- n.intValue()
- case _: LongNode =>
- checkValidPath()
- n.longValue()
- case _: ShortNode =>
- checkValidPath()
- n.shortValue()
- case _: TextNode =>
- checkValidPath()
- n.textValue()
- case _: NullNode =>
- checkValidPath()
- null
- case _: MissingNode =>
- checkValidPath()
- null
-
- case node: ObjectNode =>
- if (p.isEmpty) {
- throw new IllegalArgumentException(
- s"Invalid field selection for '${path.mkString(".")}'. The path is not resolving to a primitive field",
- )
- }
- val childNode = Option(node.get(p.head)).getOrElse {
- throw new IllegalArgumentException(
- s"Invalid field selection for '${path.mkString(".")}'. Can't find ${p.head} field. Field found are:${node.fieldNames().asScala.mkString(",")}",
- )
- }
-
- innerExtract(childNode, p.tail)
- case _: ArrayNode =>
- throw new IllegalArgumentException(
- s"Invalid field selection for '${path.mkString(".")}'. The path is involving an array structure",
- )
-
- case other =>
- throw new IllegalArgumentException(
- s"Invalid field selection for '${path.mkString(".")}'. $other is not handled",
- )
- }
- }
-
- if (node == null) {
- throw new NullPointerException("Invalid parameter 'node'")
- }
- innerExtract(node, path)
- }
-
- def extract(struct: Struct, path: Vector[String]): Any = {
- // @tailrec
- def innerExtract(field: Field, value: AnyRef, p: Vector[String]): Any = {
- def checkValidPath() =
- if (p.nonEmpty) {
- throw new IllegalArgumentException(
- s"Invalid field selection for '${path.mkString(".")}'. It doesn't resolve to a primitive field",
- )
- }
-
- if (value == null) {
- throw new IllegalArgumentException(
- s"Invalid field selection for '${path.mkString(".")}'. Field '${field.name()}' is null",
- )
- }
- Option(field.schema().name()).collect {
- case Decimal.LOGICAL_NAME =>
- value match {
- case bd: BigDecimal =>
- checkValidPath()
- bd
- case _: Array[Byte] =>
- checkValidPath()
- Decimal.toLogical(field.schema, value.asInstanceOf[Array[Byte]])
- }
- case Date.LOGICAL_NAME =>
- value.asInstanceOf[Any] match {
- case d: java.util.Date =>
- checkValidPath()
- d
- case i: Int =>
- checkValidPath()
- Date.toLogical(field.schema, i)
- case _ =>
- throw new IllegalArgumentException(s"Can't convert $value to Date for schema:${field.schema().`type`()}")
- }
- case Time.LOGICAL_NAME =>
- value.asInstanceOf[Any] match {
- case i: Int =>
- checkValidPath()
- Time.toLogical(field.schema, i)
- case d: java.util.Date =>
- checkValidPath()
- d
- case _ =>
- throw new IllegalArgumentException(s"Can't convert $value to Date for schema:${field.schema().`type`()}")
- }
- case Timestamp.LOGICAL_NAME =>
- value.asInstanceOf[Any] match {
- case l: Long =>
- checkValidPath()
- Timestamp.toLogical(field.schema, l)
- case d: java.util.Date =>
- checkValidPath()
- d
- case _ =>
- throw new IllegalArgumentException(s"Can't convert $value to Date for schema:${field.schema().`type`()}")
- }
- }.getOrElse {
- val v = field.schema().`type`() match {
- case Schema.Type.BOOLEAN =>
- checkValidPath()
- value.asInstanceOf[Boolean]
- case Schema.Type.BYTES =>
- checkValidPath()
- value.asInstanceOf[Array[Byte]]
- case Schema.Type.FLOAT32 =>
- checkValidPath()
- value.asInstanceOf[Float]
- case Schema.Type.FLOAT64 =>
- checkValidPath()
- value.asInstanceOf[Double]
- case Schema.Type.INT8 =>
- checkValidPath()
- value.asInstanceOf[Byte]
- case Schema.Type.INT16 =>
- checkValidPath()
- value.asInstanceOf[Short]
- case Schema.Type.INT32 =>
- checkValidPath()
- value.asInstanceOf[Int]
- case Schema.Type.INT64 =>
- checkValidPath()
- value.asInstanceOf[Long]
- case Schema.Type.STRING =>
- checkValidPath()
- value.toString
-
- case Schema.Type.MAP =>
- if (p.isEmpty) {
- throw new IllegalArgumentException(
- s"Invalid field selection for '${path.mkString(".")}'. It doesn't resolve to a primitive field. It resolves to:${field.schema()}",
- )
- }
- val map = value.asInstanceOf[java.util.Map[String, AnyRef]]
- val f = new Field(p.head, 0, field.schema().valueSchema())
-
- innerExtract(f, map.get(p.head), p.tail)
-
- case Schema.Type.STRUCT =>
- if (p.isEmpty) {
- throw new IllegalArgumentException(
- s"Invalid field selection for '${path.mkString(".")}'. It doesn't resolve to a primitive field. It resolves to:${field.schema()}",
- )
- }
- val s = value.asInstanceOf[Struct]
- val childField = Option(s.schema().field(p.head))
- .getOrElse {
- throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(
- ".",
- )}'. Can't find field '${p.head}'. Fields available:${s.schema().fields().asScala.map(_.name()).mkString(",")}")
- }
-
- innerExtract(childField, s.get(childField), p.tail)
- case other => throw new ConnectException(s"$other is not a recognized schema")
- }
- v
- }
- }
-
- val field = Option(struct.schema().field(path.head)).getOrElse {
- throw new IllegalArgumentException(
- s"Couldn't find field '${path.head}' in the schema:${struct.schema().fields().asScala.map(_.name()).mkString(",")}",
- )
- }
-
- innerExtract(field, struct.get(field), path.tail)
- }
-}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/Transform.scala b/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/Transform.scala
deleted file mode 100644
index 4eb4ea42d..000000000
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/Transform.scala
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7
-
-import io.lenses.streamreactor.connect.json.SimpleJsonConverter
-import com.fasterxml.jackson.annotation.JsonInclude
-import com.fasterxml.jackson.databind.JsonNode
-import io.lenses.connect.sql.StructSql._
-import io.lenses.json.sql.JacksonJson
-import io.lenses.json.sql.JsonSql._
-import io.lenses.sql.Field
-import com.typesafe.scalalogging.StrictLogging
-import org.apache.kafka.connect.data.Schema
-import org.apache.kafka.connect.data.Struct
-
-import java.nio.ByteBuffer
-import scala.util.Failure
-import scala.util.Success
-import scala.util.Try
-
-private object Transform extends StrictLogging {
- lazy val simpleJsonConverter = new SimpleJsonConverter()
-
- def apply(
- fields: Seq[Field],
- schema: Schema,
- value: Any,
- withStructure: Boolean,
- ): JsonNode = {
- def raiseException(msg: String, t: Throwable) = throw new IllegalArgumentException(msg, t)
-
- if (value == null) {
- if (schema == null || !schema.isOptional) {
- raiseException("Null value is not allowed.", null)
- } else null
- } else {
- if (schema != null) {
- schema.`type`() match {
- case Schema.Type.BYTES =>
- //we expected to be json
- val array = value match {
- case a: Array[Byte] => a
- case b: ByteBuffer => b.array()
- case other => raiseException(s"Invalid payload:$other for schema Schema.BYTES.", null)
- }
-
- Try(JacksonJson.mapper.readTree(array)) match {
- case Failure(e) => raiseException("Invalid json.", e)
- case Success(json) =>
- Try(json.sql(fields, !withStructure)) match {
- case Failure(e) => raiseException(s"A KCQL exception occurred. ${e.getMessage}", e)
- case Success(jn) => jn
- }
- }
-
- case Schema.Type.STRING =>
- //we expected to be json
- Try(JacksonJson.asJson(value.asInstanceOf[String])) match {
- case Failure(e) => raiseException("Invalid json", e)
- case Success(json) =>
- Try(json.sql(fields, !withStructure)) match {
- case Success(jn) => jn
- case Failure(e) => raiseException(s"A KCQL exception occurred.${e.getMessage}", e)
- }
- }
-
- case Schema.Type.STRUCT =>
- val struct = value.asInstanceOf[Struct]
- Try(struct.sql(fields, !withStructure)) match {
- case Success(s) => simpleJsonConverter.fromConnectData(s.schema(), s)
-
- case Failure(e) => raiseException(s"A KCQL error occurred.${e.getMessage}", e)
- }
-
- case other => raiseException(s"Can't transform Schema type:$other.", null)
- }
- } else {
- //we can handle java.util.Map (this is what JsonConverter can spit out)
- value match {
- case m: java.util.Map[_, _] =>
- val map = m.asInstanceOf[java.util.Map[String, Any]]
- val jsonNode: JsonNode =
- JacksonJson.mapper.setSerializationInclusion(JsonInclude.Include.ALWAYS).valueToTree[JsonNode](map)
- Try(jsonNode.sql(fields, !withStructure)) match {
- case Success(j) => j
- case Failure(e) => raiseException(s"A KCQL exception occurred.${e.getMessage}", e)
- }
- case s: String =>
- Try(JacksonJson.asJson(s)) match {
- case Failure(e) => raiseException("Invalid json.", e)
- case Success(json) =>
- Try(json.sql(fields, !withStructure)) match {
- case Success(jn) => jn
- case Failure(e) => raiseException(s"A KCQL exception occurred.${e.getMessage}", e)
- }
- }
-
- case b: Array[Byte] =>
- Try(JacksonJson.mapper.readTree(b)) match {
- case Failure(e) => raiseException("Invalid json.", e)
- case Success(json) =>
- Try(json.sql(fields, !withStructure)) match {
- case Failure(e) => raiseException(s"A KCQL exception occurred. ${e.getMessage}", e)
- case Success(jn) => jn
- }
- }
- //we take it as String
- case other => raiseException(s"Value:$other is not handled!", null)
- }
- }
- }
- }
-}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/config/ElasticConfig.scala b/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/config/ElasticConfig.scala
deleted file mode 100644
index 31237b4c6..000000000
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/config/ElasticConfig.scala
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7.config
-
-import io.lenses.kcql.Kcql
-import io.lenses.streamreactor.common.config.base.traits.BaseConfig
-import io.lenses.streamreactor.common.config.base.traits.ErrorPolicySettings
-import io.lenses.streamreactor.common.config.base.traits.NumberRetriesSettings
-import io.lenses.streamreactor.common.config.base.traits.WriteTimeoutSettings
-import org.apache.kafka.common.config.ConfigDef
-import org.apache.kafka.common.config.ConfigDef.Importance
-import org.apache.kafka.common.config.ConfigDef.Type
-
-object ElasticConfig {
-
- val config: ConfigDef = new ConfigDef()
- .define(
- ElasticConfigConstants.PROTOCOL,
- Type.STRING,
- ElasticConfigConstants.PROTOCOL_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.PROTOCOL_DOC,
- "Connection",
- 1,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.PROTOCOL,
- )
- .define(
- ElasticConfigConstants.HOSTS,
- Type.STRING,
- ElasticConfigConstants.HOSTS_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.HOSTS_DOC,
- "Connection",
- 2,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.HOSTS,
- )
- .define(
- ElasticConfigConstants.ES_PORT,
- Type.INT,
- ElasticConfigConstants.ES_PORT_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ES_PORT_DOC,
- "Connection",
- 3,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.HOSTS,
- )
- .define(
- ElasticConfigConstants.ES_PREFIX,
- Type.STRING,
- ElasticConfigConstants.ES_PREFIX_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ES_PREFIX_DOC,
- "Connection",
- 4,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.HOSTS,
- )
- .define(
- ElasticConfigConstants.ES_CLUSTER_NAME,
- Type.STRING,
- ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ES_CLUSTER_NAME_DOC,
- "Connection",
- 5,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.ES_CLUSTER_NAME,
- )
- .define(
- ElasticConfigConstants.WRITE_TIMEOUT_CONFIG,
- Type.INT,
- ElasticConfigConstants.WRITE_TIMEOUT_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.WRITE_TIMEOUT_DOC,
- "Connection",
- 6,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.WRITE_TIMEOUT_DISPLAY,
- )
- .define(
- ElasticConfigConstants.BATCH_SIZE_CONFIG,
- Type.INT,
- ElasticConfigConstants.BATCH_SIZE_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.BATCH_SIZE_DOC,
- "Connection",
- 7,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.BATCH_SIZE_DISPLAY,
- )
- .define(
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME,
- Type.STRING,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC,
- "Connection",
- 8,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME,
- )
- .define(
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD,
- Type.STRING,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC,
- "Connection",
- 9,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD,
- )
- .define(
- ElasticConfigConstants.ERROR_POLICY_CONFIG,
- Type.STRING,
- ElasticConfigConstants.ERROR_POLICY_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ERROR_POLICY_DOC,
- "Error",
- 1,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.ERROR_POLICY_CONFIG,
- )
- .define(
- ElasticConfigConstants.NBR_OF_RETRIES_CONFIG,
- Type.INT,
- ElasticConfigConstants.NBR_OF_RETIRES_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.NBR_OF_RETRIES_DOC,
- "Error",
- 2,
- ConfigDef.Width.SHORT,
- ElasticConfigConstants.NBR_OF_RETRIES_CONFIG,
- )
- .define(
- ElasticConfigConstants.ERROR_RETRY_INTERVAL,
- Type.INT,
- ElasticConfigConstants.ERROR_RETRY_INTERVAL_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.ERROR_RETRY_INTERVAL_DOC,
- "Error",
- 3,
- ConfigDef.Width.LONG,
- ElasticConfigConstants.ERROR_RETRY_INTERVAL,
- )
- .define(
- ElasticConfigConstants.KCQL,
- Type.STRING,
- Importance.HIGH,
- ElasticConfigConstants.KCQL_DOC,
- "KCQL",
- 1,
- ConfigDef.Width.LONG,
- ElasticConfigConstants.KCQL,
- )
- .define(
- ElasticConfigConstants.PK_JOINER_SEPARATOR,
- Type.STRING,
- ElasticConfigConstants.PK_JOINER_SEPARATOR_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.PK_JOINER_SEPARATOR_DOC,
- "KCQL",
- 2,
- ConfigDef.Width.SHORT,
- ElasticConfigConstants.PK_JOINER_SEPARATOR,
- )
- .define(
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED,
- Type.BOOLEAN,
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED_DOC,
- "Metrics",
- 1,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED_DISPLAY,
- )
-}
-
-/**
- * ElasticSinkConfig
- *
- * Holds config, extends AbstractConfig.
- */
-case class ElasticConfig(props: Map[String, String])
- extends BaseConfig(ElasticConfigConstants.CONNECTOR_PREFIX, ElasticConfig.config, props)
- with WriteTimeoutSettings
- with ErrorPolicySettings
- with NumberRetriesSettings {
- val kcqlConstant: String = ElasticConfigConstants.KCQL
-
- def getKcql(): Seq[Kcql] =
- getString(kcqlConstant).split(";").filter(_.trim.nonEmpty).map(Kcql.parse).toIndexedSeq
-}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/config/ElasticConfigConstants.scala b/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/config/ElasticConfigConstants.scala
deleted file mode 100644
index 6de86f2d5..000000000
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/config/ElasticConfigConstants.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7.config
-
-import io.lenses.streamreactor.common.config.base.const.TraitConfigConst._
-
-object ElasticConfigConstants {
-
- val CONNECTOR_PREFIX = "connect.elastic"
-
- val PROTOCOL = s"${CONNECTOR_PREFIX}.protocol"
- val PROTOCOL_DOC = "URL protocol (http, https)"
- val PROTOCOL_DEFAULT = "http"
-
- val HOSTS = s"${CONNECTOR_PREFIX}.${CONNECTION_HOSTS_SUFFIX}"
- val HOSTS_DOC = "List of hostnames for Elastic Search cluster node, not including protocol or port."
- val HOSTS_DEFAULT = "localhost"
-
- val ES_PORT = s"${CONNECTOR_PREFIX}.${CONNECTION_PORT_SUFFIX}"
- val ES_PORT_DOC = "Port on which Elastic Search node listens on"
- val ES_PORT_DEFAULT = 9300
-
- val ES_PREFIX = s"${CONNECTOR_PREFIX}.tableprefix"
- val ES_PREFIX_DOC = "Table prefix (optional)"
- val ES_PREFIX_DEFAULT = ""
-
- val ES_CLUSTER_NAME = s"${CONNECTOR_PREFIX}.${CLUSTER_NAME_SUFFIX}"
- val ES_CLUSTER_NAME_DOC = "Name of the elastic search cluster, used in local mode for setting the connection"
- val ES_CLUSTER_NAME_DEFAULT = "elasticsearch"
-
- val KCQL = s"${CONNECTOR_PREFIX}.${KCQL_PROP_SUFFIX}"
- val KCQL_DOC = "KCQL expression describing field selection and routes."
-
- val WRITE_TIMEOUT_CONFIG = s"${CONNECTOR_PREFIX}.${WRITE_TIMEOUT_SUFFIX}"
- val WRITE_TIMEOUT_DOC = "The time to wait in millis. Default is 5 minutes."
- val WRITE_TIMEOUT_DISPLAY = "Write timeout"
- val WRITE_TIMEOUT_DEFAULT = 300000
-
- val CLIENT_HTTP_BASIC_AUTH_USERNAME = s"$CONNECTOR_PREFIX.use.http.username"
- val CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT = ""
- val CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC = "Username if HTTP Basic Auth required default is null."
- val CLIENT_HTTP_BASIC_AUTH_PASSWORD = s"$CONNECTOR_PREFIX.use.http.password"
- val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT = ""
- val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC = "Password if HTTP Basic Auth required default is null."
-
- val NBR_OF_RETRIES_CONFIG = s"${CONNECTOR_PREFIX}.${MAX_RETRIES_PROP_SUFFIX}"
- val NBR_OF_RETRIES_DOC = "The maximum number of times to try the write again."
- val NBR_OF_RETIRES_DEFAULT = 20
-
- val ERROR_POLICY_CONFIG = s"${CONNECTOR_PREFIX}.${ERROR_POLICY_PROP_SUFFIX}"
- val ERROR_POLICY_DOC: String =
- """Specifies the action to be taken if an error occurs while inserting the data
- |There are two available options:
- |NOOP - the error is swallowed
- |THROW - the error is allowed to propagate.
- |RETRY - The exception causes the Connect framework to retry the message. The number of retries is based on
- |The error will be logged automatically""".stripMargin
- val ERROR_POLICY_DEFAULT = "THROW"
-
- val BATCH_SIZE_CONFIG = s"$CONNECTOR_PREFIX.$BATCH_SIZE_PROP_SUFFIX"
- val BATCH_SIZE_DOC =
- "How many records to process at one time. As records are pulled from Kafka it can be 100k+ which will not be feasible to throw at Elastic search at once"
- val BATCH_SIZE_DISPLAY = "Batch size"
- val BATCH_SIZE_DEFAULT = 4000
-
- val ERROR_RETRY_INTERVAL = s"${CONNECTOR_PREFIX}.${RETRY_INTERVAL_PROP_SUFFIX}"
- val ERROR_RETRY_INTERVAL_DOC = "The time in milliseconds between retries."
- val ERROR_RETRY_INTERVAL_DEFAULT = "60000"
-
- /*
- val INDEX_NAME_SUFFIX = s"${CONNECTOR_PREFIX}.index.suffix"
- val INDEX_NAME_SUFFIX_DOC = "Suffix to append to the index name. Supports date time notation inside curly brackets. E.g. 'abc_{YYYY-MM-dd}_def'"
- val INDEX_NAME_SUFFIX_DEFAULT: String = null
-
- val AUTO_CREATE_INDEX = s"${CONNECTOR_PREFIX}.index.auto.create"
- val AUTO_CREATE_INDEX_DOC = "The flag enables/disables auto creating the ElasticSearch index. Boolean value required. Defaults to TRUE."
- val AUTO_CREATE_INDEX_DEFAULT = true
-
- val DOCUMENT_TYPE = s"${CONNECTOR_PREFIX}.document.type"
- val DOCUMENT_TYPE_DOC = "Sets the ElasticSearch document type. See https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-type-field.html for more info."
- val DOCUMENT_TYPE_DEFAULT: String = null
- */
-
- val PROGRESS_COUNTER_ENABLED = PROGRESS_ENABLED_CONST
- val PROGRESS_COUNTER_ENABLED_DOC = "Enables the output for how many records have been processed"
- val PROGRESS_COUNTER_ENABLED_DEFAULT = false
- val PROGRESS_COUNTER_ENABLED_DISPLAY = "Enable progress counter"
-
- val PK_JOINER_SEPARATOR = s"$CONNECTOR_PREFIX.pk.separator"
- val PK_JOINER_SEPARATOR_DOC = "Separator used when have more that one field in PK"
- val PK_JOINER_SEPARATOR_DEFAULT = "-"
-}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/config/ElasticSettings.scala b/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/config/ElasticSettings.scala
deleted file mode 100644
index 83341edd3..000000000
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/config/ElasticSettings.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7.config
-
-import io.lenses.kcql.Kcql
-import io.lenses.streamreactor.common.errors.ErrorPolicy
-
-/**
- * Created by andrew@datamountaineer.com on 13/05/16.
- * stream-reactor-maven
- */
-case class ElasticSettings(
- kcqls: Seq[Kcql],
- errorPolicy: ErrorPolicy,
- taskRetries: Int = ElasticConfigConstants.NBR_OF_RETIRES_DEFAULT,
- writeTimeout: Int = ElasticConfigConstants.WRITE_TIMEOUT_DEFAULT,
- batchSize: Int = ElasticConfigConstants.BATCH_SIZE_DEFAULT,
- pkJoinerSeparator: String = ElasticConfigConstants.PK_JOINER_SEPARATOR_DEFAULT,
- httpBasicAuthUsername: String = ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
- httpBasicAuthPassword: String = ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
-)
-
-object ElasticSettings {
-
- def apply(config: ElasticConfig): ElasticSettings = {
- val kcql = config.getKcql()
- val pkJoinerSeparator = config.getString(ElasticConfigConstants.PK_JOINER_SEPARATOR)
- val writeTimeout = config.getWriteTimeout
- val errorPolicy = config.getErrorPolicy
- val retries = config.getNumberRetries
- val httpBasicAuthUsername = config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME)
- val httpBasicAuthPassword = config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD)
-
- val batchSize = config.getInt(ElasticConfigConstants.BATCH_SIZE_CONFIG)
-
- ElasticSettings(kcql,
- errorPolicy,
- retries,
- writeTimeout,
- batchSize,
- pkJoinerSeparator,
- httpBasicAuthUsername,
- httpBasicAuthPassword,
- )
- }
-}
diff --git a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/IndexNameFragment.scala b/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/IndexNameFragment.scala
deleted file mode 100644
index 6581339ca..000000000
--- a/kafka-connect-elastic7/src/main/scala/io/lenses/streamreactor/connect/elastic7/indexname/IndexNameFragment.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7.indexname
-
-import java.time.Clock
-import java.time.LocalDateTime._
-import java.time.format.DateTimeFormatter._
-
-object ClockProvider {
- val ClockInstance: Clock = Clock.systemUTC()
-}
-
-sealed trait IndexNameFragment {
- def getFragment: String
-}
-
-case class TextFragment(text: String) extends IndexNameFragment {
- override def getFragment: String = text
-}
-
-case class DateTimeFragment(dateTimeFormat: String, clock: Clock = ClockProvider.ClockInstance)
- extends IndexNameFragment {
- override def getFragment: String = s"${now(clock).format(ofPattern(dateTimeFormat))}"
-}
-object DateTimeFragment {
- val OpeningChar = '{'
- val ClosingChar = '}'
-}
diff --git a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/CreateIndexTest.scala b/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/CreateIndexTest.scala
deleted file mode 100644
index 740d3f125..000000000
--- a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/CreateIndexTest.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7
-
-import io.lenses.kcql.Kcql
-import io.lenses.streamreactor.connect.elastic7.indexname.CreateIndex
-import org.joda.time.DateTime
-import org.joda.time.DateTimeZone
-import org.scalatest.matchers.should.Matchers
-import org.scalatest.wordspec.AnyWordSpec
-
-class CreateIndexTest extends AnyWordSpec with Matchers {
- "CreateIndex" should {
- "create an index name without suffix when suffix not set" in {
- val kcql = Kcql.parse("INSERT INTO index_name SELECT * FROM topicA")
- CreateIndex.getIndexName(kcql) shouldBe "index_name"
- }
-
- "create an index name with suffix when suffix is set" in {
- val kcql = Kcql.parse("INSERT INTO index_name SELECT * FROM topicA WITHINDEXSUFFIX=_suffix_{YYYY-MM-dd}")
-
- val formattedDateTime = new DateTime(DateTimeZone.UTC).toString("YYYY-MM-dd")
- CreateIndex.getIndexName(kcql) shouldBe s"index_name_suffix_$formattedDateTime"
- }
- }
-}
diff --git a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticConfigTest.scala b/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticConfigTest.scala
deleted file mode 100644
index f775f1c6f..000000000
--- a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticConfigTest.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7
-
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfigConstants
-
-class ElasticConfigTest extends TestBase {
- "A ElasticConfig should return the client mode and hostnames" in {
- val config = new ElasticConfig(getElasticSinkConfigProps())
- config.getString(ElasticConfigConstants.HOSTS) shouldBe ELASTIC_SEARCH_HOSTNAMES
- config.getString(ElasticConfigConstants.ES_CLUSTER_NAME) shouldBe ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT
- config.getString(ElasticConfigConstants.KCQL) shouldBe QUERY
- }
-
- "A ElasticConfig should return the http basic auth username and password when set" in {
- val config = new ElasticConfig(getElasticSinkConfigPropsHTTPClient(auth = true))
- config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME) shouldBe BASIC_AUTH_USERNAME
- config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD) shouldBe BASIC_AUTH_PASSWORD
- }
-}
diff --git a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/TestBase.scala b/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/TestBase.scala
deleted file mode 100644
index 9b1005541..000000000
--- a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/TestBase.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7
-
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfigConstants
-import org.apache.kafka.common.TopicPartition
-import org.scalatest.BeforeAndAfter
-import org.scalatest.matchers.should.Matchers
-import org.scalatest.wordspec.AnyWordSpec
-
-import java.time.LocalDateTime
-import java.time.format.DateTimeFormatter._
-import java.util
-
-trait TestBase extends AnyWordSpec with Matchers with BeforeAndAfter {
- val ELASTIC_SEARCH_HOSTNAMES = "localhost:9300"
- val BASIC_AUTH_USERNAME = "usertest"
- val BASIC_AUTH_PASSWORD = "userpassword"
- val TOPIC = "sink_test"
- val INDEX = "index_andrew"
- val INDEX_WITH_DATE = s"${INDEX}_${LocalDateTime.now.format(ofPattern("YYYY-MM-dd"))}"
- val QUERY = s"INSERT INTO $INDEX SELECT * FROM $TOPIC"
- val QUERY_PK = s"INSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
- val QUERY_SELECTION = s"INSERT INTO $INDEX SELECT id, string_field FROM $TOPIC"
- val UPDATE_QUERY = s"UPSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
- val UPDATE_QUERY_SELECTION = s"UPSERT INTO $INDEX SELECT id, string_field FROM $TOPIC PK id"
-
- protected val PARTITION: Int = 12
- protected val PARTITION2: Int = 13
- protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
- protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
- protected val ASSIGNMENT: util.Set[TopicPartition] = new util.HashSet[TopicPartition]
- //Set topic assignments
- ASSIGNMENT.add(TOPIC_PARTITION)
- ASSIGNMENT.add(TOPIC_PARTITION2)
-
- //get the assignment of topic partitions for the sinkTask
- def getAssignment: util.Set[TopicPartition] =
- ASSIGNMENT
-
- def getElasticSinkConfigProps(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- getBaseElasticSinkConfigProps(QUERY, clusterName)
-
- def getBaseElasticSinkConfigProps(
- query: String,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- Map(
- "topics" -> TOPIC,
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> query,
- )
-
- def getElasticSinkConfigPropsHTTPClient(
- auth: Boolean = false,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): Map[String, String] =
- Map(
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> QUERY,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME -> (if (auth) BASIC_AUTH_USERNAME
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT),
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD -> (if (auth) BASIC_AUTH_PASSWORD
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT),
- )
-}
diff --git a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/indexname/CustomIndexNameTest.scala b/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/indexname/CustomIndexNameTest.scala
deleted file mode 100644
index 62623c5ba..000000000
--- a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/indexname/CustomIndexNameTest.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2017-2024 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.lenses.streamreactor.connect.elastic7.indexname
-
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should.Matchers
-import org.scalatest.prop.TableDrivenPropertyChecks
-
-class CustomIndexNameTest extends AnyFlatSpec with TableDrivenPropertyChecks with Matchers {
-
- val ValidIndexNames = Table(
- ("Valid index name", "Expectations"),
- ("", Vector()),
- ("abc", Vector(TextFragment("abc"))),
- ("abc{YYYY-MM-dd}", Vector(TextFragment("abc"), DateTimeFragment("YYYY-MM-dd"))),
- ("{YYYY-MM-dd}abc", Vector(DateTimeFragment("YYYY-MM-dd"), TextFragment("abc"))),
- ("{YYYY-MM-dd}abc{HH-MM-ss}",
- Vector(DateTimeFragment("YYYY-MM-dd"), TextFragment("abc"), DateTimeFragment("HH-MM-ss")),
- ),
- ("{YYYY-MM-dd}{HH-MM-ss}", Vector(DateTimeFragment("YYYY-MM-dd"), DateTimeFragment("HH-MM-ss"))),
- ("abc{}", Vector(TextFragment("abc"))),
- ("{}abc", Vector(TextFragment("abc"))),
- )
-
- val InvalidIndexNames = Table(
- "Invalid index name",
- "}abc",
- "abc}",
- "abc}def",
- )
-
- "Custom index name" should "parse a valid String with date time formatting options" in {
- forAll(ValidIndexNames) {
- case (validIndexName, expectations) =>
- CustomIndexName.parseIndexName(validIndexName) shouldBe CustomIndexName(expectations)
- }
- }
-
- it should "throw an exception when using invalid index name" in {
- forAll(InvalidIndexNames) {
- case (invalidIndexName) =>
- intercept[InvalidCustomIndexNameException] {
- CustomIndexName.parseIndexName(invalidIndexName)
- }
- }
- }
-
- it should "return a valid String from a list of fragments" in new ClockFixture {
- CustomIndexName(
- Vector(DateTimeFragment("YYYY-MM-dd", TestClock), TextFragment("ABC"), DateTimeFragment("HH:mm:ss", TestClock)),
- ).toString shouldBe "2016-10-02ABC14:00:00"
- }
-}
diff --git a/kafka-connect-elastic6/src/fun/resources/logback.xml b/kafka-connect-elastic8/src/fun/resources/logback.xml
similarity index 100%
rename from kafka-connect-elastic6/src/fun/resources/logback.xml
rename to kafka-connect-elastic8/src/fun/resources/logback.xml
diff --git a/kafka-connect-elastic7/src/fun/scala/io/lenses/streamreactor/connect/Elastic7Test.scala b/kafka-connect-elastic8/src/fun/scala/io/lenses/streamreactor/connect/Elastic8Test.scala
similarity index 91%
rename from kafka-connect-elastic7/src/fun/scala/io/lenses/streamreactor/connect/Elastic7Test.scala
rename to kafka-connect-elastic8/src/fun/scala/io/lenses/streamreactor/connect/Elastic8Test.scala
index b96da0fa8..babd9e169 100644
--- a/kafka-connect-elastic7/src/fun/scala/io/lenses/streamreactor/connect/Elastic7Test.scala
+++ b/kafka-connect-elastic8/src/fun/scala/io/lenses/streamreactor/connect/Elastic8Test.scala
@@ -1,4 +1,5 @@
package io.lenses.streamreactor.connect
+
import cats.effect.IO
import cats.effect.testing.scalatest.AsyncIOSpec
import com.jayway.jsonpath.JsonPath
@@ -20,13 +21,13 @@ import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse.BodyHandlers
-class Elastic7Test extends AsyncFlatSpec with AsyncIOSpec with StreamReactorContainerPerSuite with Matchers {
+class Elastic8Test extends AsyncFlatSpec with AsyncIOSpec with StreamReactorContainerPerSuite with Matchers {
- lazy val container: ElasticsearchContainer = ElasticsearchContainer(dockerTag = "7.17.8").withNetwork(network)
+ lazy val container: ElasticsearchContainer = ElasticsearchContainer("elastic8").withNetwork(network)
override val schemaRegistryContainer: Option[SchemaRegistryContainer] = None
- override val connectorModule: String = "elastic7"
+ override val connectorModule: String = "elastic8"
override def beforeAll(): Unit = {
container.start()
@@ -38,7 +39,7 @@ class Elastic7Test extends AsyncFlatSpec with AsyncIOSpec with StreamReactorCont
container.stop()
}
- behavior of "Elastic7 connector"
+ behavior of "Elastic8 connector"
it should "sink records" in {
val resources = for {
@@ -84,13 +85,13 @@ class Elastic7Test extends AsyncFlatSpec with AsyncIOSpec with StreamReactorCont
ConnectorConfiguration(
"elastic-sink",
Map(
- "connector.class" -> ConfigValue("io.lenses.streamreactor.connect.elastic7.ElasticSinkConnector"),
+ "connector.class" -> ConfigValue("io.lenses.streamreactor.connect.elastic8.Elastic8SinkConnector"),
"tasks.max" -> ConfigValue(1),
"topics" -> ConfigValue("orders"),
"connect.elastic.protocol" -> ConfigValue("http"),
- "connect.elastic.hosts" -> ConfigValue(container.networkAlias),
+ "connect.elastic.hosts" -> ConfigValue(container.setup.key),
"connect.elastic.port" -> ConfigValue(Integer.valueOf(container.port)),
- "connect.elastic.cluster.name" -> ConfigValue("elasticsearch"),
+ "connect.elastic.cluster.name" -> ConfigValue(container.setup.key),
"connect.elastic.kcql" -> ConfigValue("INSERT INTO orders SELECT * FROM orders"),
"connect.progress.enabled" -> ConfigValue(true),
),
diff --git a/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterSelectionTest.scala b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterSelectionTest.scala
new file mode 100644
index 000000000..3a7160298
--- /dev/null
+++ b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterSelectionTest.scala
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2017 Datamountaineer.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.lenses.streamreactor.connect.elastic8
+
+import org.apache.kafka.connect.sink.SinkTaskContext
+import org.mockito.MockitoSugar.mock
+import org.mockito.MockitoSugar.when
+
+import scala.jdk.CollectionConverters.SetHasAsJava
+
+class ElasticJsonWriterSelectionTest extends ITBase {
+
+ "A ElasticWriter should insert into Elastic Search a number of records" in {
+
+ mockContextForAssignment()
+
+ val props = getElasticSinkConfigPropsSelection()
+ writeAndVerifyTestRecords(props, getTestRecords)
+ }
+
+ "A ElasticWriter should insert into Elastic Search a number of records when nested fields are selected" in {
+
+ mockContextForAssignment()
+
+ val props = getBaseElasticSinkConfigProps(s"INSERT INTO $INDEX SELECT id, nested.string_field FROM $TOPIC")
+ writeAndVerifyTestRecords(props, getTestRecordsNested)
+
+ }
+
+ "A ElasticWriter should update records in Elastic Search" in {
+
+ mockContextForAssignment()
+
+ val props = getElasticSinkUpdateConfigPropsSelection()
+ writeAndVerifyTestRecords(props, getTestRecords, getUpdateTestRecord)
+
+ }
+
+ "A ElasticWriter should update records in Elastic Search with PK nested field" in {
+
+ mockContextForAssignment()
+
+ val props =
+ getBaseElasticSinkConfigProps(s"UPSERT INTO $INDEX SELECT nested.id, string_field FROM $TOPIC PK nested.id")
+ writeAndVerifyTestRecords(props, getTestRecordsNested, getUpdateTestRecordNested)
+
+ }
+
+ private def mockContextForAssignment(): Unit = {
+ //mock the context to return our assignment when called
+ val context = mock[SinkTaskContext]
+ when(context.assignment()).thenReturn(ASSIGNMENT.asJava)
+ ()
+ }
+
+}
diff --git a/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterTest.scala b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterTest.scala
new file mode 100644
index 000000000..c9611b9de
--- /dev/null
+++ b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterTest.scala
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2017 Datamountaineer.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.lenses.streamreactor.connect.elastic8
+
+import com.sksamuel.elastic4s.ElasticDsl._
+import io.lenses.streamreactor.connect.elastic8.client.Elastic8ClientWrapper
+import org.apache.kafka.connect.sink.SinkRecord
+import org.mockito.MockitoSugar
+import org.scalatest.BeforeAndAfterEach
+import org.scalatest.concurrent.Eventually.eventually
+
+import java.util.UUID
+import scala.util.Using
+
+class ElasticJsonWriterTest extends ITBase with MockitoSugar with BeforeAndAfterEach {
+
+ class TestContext {
+
+ val RandomClusterName: String = UUID.randomUUID().toString
+ val TestRecords: Vector[SinkRecord] = getTestRecords
+
+ }
+
+ "A ElasticWriter should insert into Elastic Search a number of records" in new TestContext {
+
+ val props = getElasticSinkConfigProps(RandomClusterName)
+
+ writeAndVerifyTestRecords(props, TestRecords)
+
+ }
+
+ "A ElasticWriter should update a number of records in Elastic Search" in new TestContext {
+ val props = getElasticSinkUpdateConfigProps(RandomClusterName)
+
+ writeAndVerifyTestRecords(props, getTestRecords, getUpdateTestRecord)
+ }
+
+ "A ElasticWriter should update a number of records in Elastic Search with index suffix defined" in new TestContext {
+
+ val props = getElasticSinkConfigPropsWithDateSuffixAndIndexAutoCreation(autoCreate = true)
+
+ writeAndVerifyTestRecords(props, getTestRecords, getUpdateTestRecord, INDEX_WITH_DATE)
+
+ }
+
+ "It should fail writing to a non-existent index when auto creation is disabled" in new TestContext {
+
+ val props = getElasticSinkConfigPropsWithDateSuffixAndIndexAutoCreation(autoCreate = false, RandomClusterName)
+
+ Using.resource(LocalNode()) {
+ case LocalNode(_, client) =>
+ Using.resource(createElasticJsonWriter(new Elastic8ClientWrapper(client), props)) {
+ writer =>
+ writer.write(TestRecords)
+ eventually {
+ val searchResponse = client.execute {
+ search(INDEX_WITH_DATE)
+ }.await
+ searchResponse.isError should be(true)
+ searchResponse.error.`type` should be("index_not_found_exception")
+ }
+ }
+ }
+
+ }
+
+ "A ElasticWriter should insert into Elastic Search a number of records with the HTTP Client" in new TestContext {
+
+ val props = getElasticSinkConfigPropsHTTPClient()
+
+ writeAndVerifyTestRecords(props, TestRecords)
+ }
+
+ "A ElasticWriter should insert into with PK Elastic Search a number of records" in new TestContext {
+
+ val props = getElasticSinkConfigPropsPk(RandomClusterName)
+
+ writeAndVerifyTestRecords(props, TestRecords, TestRecords)
+
+ }
+
+ "A ElasticWriter should insert into without PK Elastic Search a number of records" in new TestContext {
+
+ val props = getElasticSinkConfigProps(RandomClusterName)
+
+ writeAndVerifyTestRecords(props, TestRecords, TestRecords)
+
+ }
+}
diff --git a/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/ITBase.scala b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ITBase.scala
similarity index 60%
rename from kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/ITBase.scala
rename to kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ITBase.scala
index 5412fb5a4..9705b548d 100644
--- a/kafka-connect-elastic6/src/it/scala/io/lenses/streamreactor/connect/elastic6/ITBase.scala
+++ b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ITBase.scala
@@ -14,26 +14,37 @@
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic6
+package io.lenses.streamreactor.connect.elastic8
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfigConstants
+import cats.effect.unsafe.implicits.global
+import com.sksamuel.elastic4s.ElasticClient
+import com.sksamuel.elastic4s.ElasticDsl._
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettingsReader
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticJsonWriter
+import io.lenses.streamreactor.connect.elastic8.client.Elastic8ClientWrapper
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.record.TimestampType
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.data.SchemaBuilder
import org.apache.kafka.connect.data.Struct
import org.apache.kafka.connect.sink.SinkRecord
-import org.scalatest.BeforeAndAfter
+import org.scalatest.concurrent.Eventually.eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
+import org.scalatest.Assertion
+import org.scalatest.BeforeAndAfter
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter._
-import java.util
-import scala.collection.mutable
-import scala.jdk.CollectionConverters.SetHasAsScala
+import scala.util.Using
trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
+
+ val configDef = new Elastic8ConfigDef
+ import configDef._
+
val ELASTIC_SEARCH_HOSTNAMES = "localhost:9300"
val BASIC_AUTH_USERNAME = "usertest"
val BASIC_AUTH_PASSWORD = "userpassword"
@@ -46,18 +57,11 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
val UPDATE_QUERY = s"UPSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
val UPDATE_QUERY_SELECTION = s"UPSERT INTO $INDEX SELECT id, string_field FROM $TOPIC PK id"
- protected val PARTITION: Int = 12
- protected val PARTITION2: Int = 13
- protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
- protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
- protected val ASSIGNMENT: util.Set[TopicPartition] = new util.HashSet[TopicPartition]
- //Set topic assignments
- ASSIGNMENT.add(TOPIC_PARTITION)
- ASSIGNMENT.add(TOPIC_PARTITION2)
-
- //get the assignment of topic partitions for the sinkTask
- def getAssignment: util.Set[TopicPartition] =
- ASSIGNMENT
+ protected val PARTITION: Int = 12
+ protected val PARTITION2: Int = 13
+ protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
+ protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
+ protected val ASSIGNMENT: Set[TopicPartition] = Set(TOPIC_PARTITION, TOPIC_PARTITION2)
//build a test record schema
def createSchema: Schema =
@@ -102,11 +106,10 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
.put("string_field", "foo")
//generate some test records
- def getTestRecords(): Vector[SinkRecord] = {
+ def getTestRecords: Vector[SinkRecord] = {
val schema = createSchema
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
- assignment.flatMap { a =>
+ ASSIGNMENT.flatMap { a =>
(1 to 7).map { i =>
val record: Struct = createRecord(schema, a.topic() + "-" + a.partition() + "-" + i)
new SinkRecord(a.topic(),
@@ -125,9 +128,8 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
def getTestRecordsNested: Vector[SinkRecord] = {
val schema = createSchemaNested
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
- assignment.flatMap { a =>
+ ASSIGNMENT.flatMap { a =>
(1 to 7).map { i =>
val record: Struct = createRecordNested(a.topic() + "-" + a.partition() + "-" + i)
new SinkRecord(a.topic(),
@@ -146,9 +148,8 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
def getUpdateTestRecord: Vector[SinkRecord] = {
val schema = createSchema
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
- assignment.flatMap { a =>
+ ASSIGNMENT.flatMap { a =>
(1 to 2).map { i =>
val record: Struct = createRecord(schema, a.topic() + "-" + a.partition() + "-" + i)
new SinkRecord(a.topic(),
@@ -167,9 +168,8 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
def getUpdateTestRecordNested: Vector[SinkRecord] = {
val schema = createSchemaNested
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
- assignment.flatMap { a =>
+ ASSIGNMENT.flatMap { a =>
(1 to 2).map { i =>
val record: Struct = createRecordNested(a.topic() + "-" + a.partition() + "-" + i)
new SinkRecord(a.topic(),
@@ -187,68 +187,110 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
}
def getElasticSinkConfigProps(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(QUERY, clusterName)
def getElasticSinkConfigPropsSelection(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(QUERY_SELECTION, clusterName)
def getElasticSinkConfigPropsPk(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(QUERY_PK, clusterName)
def getElasticSinkUpdateConfigProps(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(UPDATE_QUERY, clusterName)
def getElasticSinkUpdateConfigPropsSelection(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(UPDATE_QUERY_SELECTION, clusterName)
def getBaseElasticSinkConfigProps(
query: String,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
Map(
- "topics" -> TOPIC,
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> query,
+ "topics" -> TOPIC,
+ HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
+ ES_CLUSTER_NAME -> clusterName,
+ PROTOCOL -> PROTOCOL_DEFAULT,
+ KCQL -> query,
)
def getElasticSinkConfigPropsWithDateSuffixAndIndexAutoCreation(
autoCreate: Boolean,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
Map(
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> (QUERY + (if (autoCreate) " AUTOCREATE "
- else "") + " WITHINDEXSUFFIX=_{YYYY-MM-dd}"),
+ HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
+ ES_CLUSTER_NAME -> clusterName,
+ PROTOCOL -> PROTOCOL_DEFAULT,
+ KCQL -> (QUERY + (if (autoCreate) " AUTOCREATE "
+ else "") + " WITHINDEXSUFFIX=_{YYYY-MM-dd}"),
)
def getElasticSinkConfigPropsHTTPClient(
auth: Boolean = false,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
Map(
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> QUERY,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME -> (if (auth) BASIC_AUTH_USERNAME
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT),
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD -> (if (auth) BASIC_AUTH_PASSWORD
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT),
+ HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
+ ES_CLUSTER_NAME -> clusterName,
+ PROTOCOL -> PROTOCOL_DEFAULT,
+ KCQL -> QUERY,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME -> (if (auth) BASIC_AUTH_USERNAME
+ else
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT),
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD -> (if (auth) BASIC_AUTH_PASSWORD
+ else
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT),
)
+
+ def writeRecords(writer: ElasticJsonWriter, records: Vector[SinkRecord]): Unit =
+ writer.write(records).attempt.map {
+ case Left(value) => fail(value)
+ case Right(_) => ()
+ }.unsafeRunSync()
+ protected def writeAndVerifyTestRecords(
+ props: Map[String, String],
+ testRecords: Vector[SinkRecord],
+ updateRecords: Vector[SinkRecord] = Vector.empty,
+ index: String = INDEX,
+ ): Any =
+ Using.resource(LocalNode()) {
+
+ case LocalNode(_, client) =>
+ Using.resource(createElasticJsonWriter(new Elastic8ClientWrapper(client), props)) {
+ writer =>
+ //write records to elastic
+ writeRecords(writer, testRecords)
+ checkCounts(testRecords, client, index)
+
+ if (updateRecords.nonEmpty) {
+ writeRecords(writer, updateRecords)
+ Thread.sleep(2000)
+ checkCounts(testRecords, client, index)
+ }
+ }
+ }
+
+ private def checkCounts(testRecords: Vector[SinkRecord], client: ElasticClient, index: String): Assertion =
+ eventually {
+ val res = client.execute {
+ search(index)
+ }.await
+ res.result.totalHits shouldBe testRecords.size
+ }
+
+ protected def createElasticJsonWriter(client: ElasticClientWrapper, props: Map[String, String]): ElasticJsonWriter =
+ ElasticCommonSettingsReader.read(new Elastic8ConfigDef, props).map(new ElasticJsonWriter(client, _)).getOrElse(fail(
+ "Unable to construct writer",
+ ))
+
}
diff --git a/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/LocalNode.scala b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/LocalNode.scala
new file mode 100644
index 000000000..83dcc0ae7
--- /dev/null
+++ b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/LocalNode.scala
@@ -0,0 +1,35 @@
+package io.lenses.streamreactor.connect.elastic8
+
+import com.sksamuel.elastic4s.ElasticClient
+import com.sksamuel.elastic4s.ElasticProperties
+import com.sksamuel.elastic4s.http.JavaClient
+import org.testcontainers.elasticsearch.ElasticsearchContainer
+
+case class LocalNode(container: ElasticsearchContainer, client: ElasticClient) extends AutoCloseable {
+ override def close(): Unit = {
+ client.close()
+ container.stop()
+ }
+}
+
+object LocalNode {
+
+ private val url = "docker.elastic.co/elasticsearch/elasticsearch:8.10.1"
+
+ def apply(): LocalNode = {
+ val container = new ElasticsearchContainer(url)
+ container.withEnv("xpack.security.enabled", "false")
+ container.start()
+ LocalNode(
+ container,
+ createLocalNodeClient(
+ container,
+ ),
+ )
+ }
+
+ private def createLocalNodeClient(localNode: ElasticsearchContainer): ElasticClient = {
+ val esProps = ElasticProperties(s"http://${localNode.getHttpHostAddress}")
+ ElasticClient(JavaClient(esProps))
+ }
+}
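// Illustrative usage sketch (assumes the LocalNode above; the index name "orders" is
// made up). Using.resource manages the container/client pair exactly as the ITBase
// integration tests do, and the elastic4s DSL creates and queries a throwaway index.
import com.sksamuel.elastic4s.ElasticDsl._
import io.lenses.streamreactor.connect.elastic8.LocalNode
import scala.util.Using

object LocalNodeSketch extends App {
  Using.resource(LocalNode()) {
    case LocalNode(_, client) =>
      // create an empty index, then confirm the containerised node answers searches
      client.execute(createIndex("orders")).await
      val response = client.execute(search("orders")).await
      println(s"total hits: ${response.result.totalHits}")
  }
}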
diff --git a/kafka-connect-elastic6/src/main/resources/elastic-ascii.txt b/kafka-connect-elastic8/src/main/resources/elastic-ascii.txt
similarity index 100%
rename from kafka-connect-elastic6/src/main/resources/elastic-ascii.txt
rename to kafka-connect-elastic8/src/main/resources/elastic-ascii.txt
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkConnector.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkConnector.scala
new file mode 100644
index 000000000..2a8864461
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkConnector.scala
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8
+
+import io.lenses.streamreactor.connect.elastic.common.ElasticSinkConnector
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8Settings
+
+class Elastic8SinkConnector
+ extends ElasticSinkConnector[
+ Elastic8Settings,
+ Elastic8ConfigDef,
+ Elastic8SinkTask,
+ ](classOf[Elastic8SinkTask], new Elastic8ConfigDef) {}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkTask.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkTask.scala
new file mode 100644
index 000000000..dc4fdac5e
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkTask.scala
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8
+
+import io.lenses.streamreactor.connect.elastic.common.ElasticSinkTask
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8Settings
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8SettingsReader
+import io.lenses.streamreactor.connect.elastic8.writers.Elastic8ClientCreator
+
+class Elastic8SinkTask
+ extends ElasticSinkTask[Elastic8Settings, Elastic8ConfigDef](
+ Elastic8SettingsReader,
+ Elastic8ClientCreator,
+ new Elastic8ConfigDef(),
+ "/elastic-ascii.txt",
+ ) {}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/client/Elastic8ClientWrapper.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/client/Elastic8ClientWrapper.scala
new file mode 100644
index 000000000..96389b8c1
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/client/Elastic8ClientWrapper.scala
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8.client
+
+import cats.effect.IO
+import com.fasterxml.jackson.databind.JsonNode
+import com.sksamuel.elastic4s.ElasticDsl.{ createIndex => indexCreate, _ }
+import com.sksamuel.elastic4s.Index
+import com.sksamuel.elastic4s.Indexable
+import com.sksamuel.elastic4s.{ ElasticClient => UnderlyingElasticClient }
+import com.typesafe.scalalogging.LazyLogging
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.client.InsertRequest
+import io.lenses.streamreactor.connect.elastic.common.client.Request
+import io.lenses.streamreactor.connect.elastic.common.client.UpsertRequest
+
+class Elastic8ClientWrapper(client: UnderlyingElasticClient) extends ElasticClientWrapper with LazyLogging {
+
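+  // Indexable instance that serialises a Jackson JsonNode to its raw JSON string so elastic4s can index it directly.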
+ private case object IndexableJsonNode extends Indexable[JsonNode] {
+ override def json(t: JsonNode): String = t.toString
+ }
+
+ override def createIndex(indexName: String): IO[Unit] =
+ IO.fromFuture {
+ IO {
+ client.execute {
+ indexCreate(indexName)
+ }
+ }
+ } *> IO.unit
+
+ override def close(): IO[Unit] = IO {
+ client.close()
+ ()
+ }.recover { t: Throwable =>
+    logger.error("Error during Elasticsearch client shutdown", t)
+ ()
+ }
+
+ override def execute(reqs: Seq[Request]): IO[Unit] =
+ IO.fromFuture {
+ IO {
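+        // Translate the connector's generic insert/upsert requests into elastic4s bulk actions.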
+ val indexes = reqs.map {
+ case InsertRequest(index, id, json, pipeline) =>
+ indexInto(new Index(index))
+ .id(id)
+ .pipeline(pipeline)
+ .source(json.toString)
+ case UpsertRequest(index, id, json) =>
+ updateById(new Index(index), id)
+ .docAsUpsert(json)(IndexableJsonNode)
+ }
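+        // refreshImmediately makes the written documents searchable as soon as the bulk call completes.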
+ val bulkRequest = bulk(indexes).refreshImmediately
+ client.execute(bulkRequest)
+ }
+ } *> IO.unit
+}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8ConfigDef.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8ConfigDef.scala
new file mode 100644
index 000000000..a359052fa
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8ConfigDef.scala
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8.config
+
+import io.lenses.streamreactor.common.config.base.const.TraitConfigConst._
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfigDef
+import org.apache.kafka.common.config.ConfigDef
+import org.apache.kafka.common.config.ConfigDef.Importance
+import org.apache.kafka.common.config.ConfigDef.Type
+
+class Elastic8ConfigDef extends ElasticConfigDef("connect.elastic") {
+
+ val PROTOCOL = s"$connectorPrefix.protocol"
+ val PROTOCOL_DOC = "URL protocol (http, https)"
+ val PROTOCOL_DEFAULT = "http"
+
+ val HOSTS = s"$connectorPrefix.$CONNECTION_HOSTS_SUFFIX"
+  val HOSTS_DOC     = "List of hostnames for the Elasticsearch cluster nodes, not including protocol or port."
+ val HOSTS_DEFAULT = "localhost"
+
+ val ES_PORT = s"$connectorPrefix.$CONNECTION_PORT_SUFFIX"
+  val ES_PORT_DOC     = "Port on which the Elasticsearch node listens"
+ val ES_PORT_DEFAULT = 9300
+
+ val ES_PREFIX = s"$connectorPrefix.tableprefix"
+ val ES_PREFIX_DOC = "Table prefix (optional)"
+ val ES_PREFIX_DEFAULT = ""
+
+ val ES_CLUSTER_NAME = s"$connectorPrefix.$CLUSTER_NAME_SUFFIX"
+  val ES_CLUSTER_NAME_DOC     = "Name of the Elasticsearch cluster, used in local mode for setting the connection"
+ val ES_CLUSTER_NAME_DEFAULT = "elasticsearch"
+
+ val CLIENT_HTTP_BASIC_AUTH_USERNAME = s"$connectorPrefix.use.http.username"
+ val CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT = ""
+  val CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC     = "Username if HTTP Basic Auth is required; default is empty (disabled)."
+  val CLIENT_HTTP_BASIC_AUTH_PASSWORD         = s"$connectorPrefix.use.http.password"
+  val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT = ""
+  val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC     = "Password if HTTP Basic Auth is required; default is empty (disabled)."
+
+ override def configDef: ConfigDef = super.configDef
+ .define(
+ PROTOCOL,
+ Type.STRING,
+ PROTOCOL_DEFAULT,
+ Importance.LOW,
+ PROTOCOL_DOC,
+ "Connection",
+ 1,
+ ConfigDef.Width.MEDIUM,
+ PROTOCOL,
+ )
+ .define(
+ HOSTS,
+ Type.STRING,
+ HOSTS_DEFAULT,
+ Importance.HIGH,
+ HOSTS_DOC,
+ "Connection",
+ 2,
+ ConfigDef.Width.MEDIUM,
+ HOSTS,
+ )
+ .define(
+ ES_PORT,
+ Type.INT,
+ ES_PORT_DEFAULT,
+ Importance.HIGH,
+ ES_PORT_DOC,
+ "Connection",
+ 3,
+ ConfigDef.Width.MEDIUM,
+      ES_PORT,
+ )
+ .define(
+ ES_PREFIX,
+ Type.STRING,
+ ES_PREFIX_DEFAULT,
+ Importance.HIGH,
+ ES_PREFIX_DOC,
+ "Connection",
+ 4,
+ ConfigDef.Width.MEDIUM,
+      ES_PREFIX,
+ )
+ .define(
+ ES_CLUSTER_NAME,
+ Type.STRING,
+ ES_CLUSTER_NAME_DEFAULT,
+ Importance.HIGH,
+ ES_CLUSTER_NAME_DOC,
+ "Connection",
+ 5,
+ ConfigDef.Width.MEDIUM,
+ ES_CLUSTER_NAME,
+ )
+ .define(
+ CLIENT_HTTP_BASIC_AUTH_USERNAME,
+ Type.STRING,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
+ Importance.LOW,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC,
+ "Connection",
+ 8,
+ ConfigDef.Width.MEDIUM,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME,
+ )
+ .define(
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD,
+ Type.STRING,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT,
+ Importance.LOW,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC,
+ "Connection",
+ 9,
+ ConfigDef.Width.MEDIUM,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD,
+ )
+
+}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8Settings.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8Settings.scala
new file mode 100644
index 000000000..edc5151dc
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8Settings.scala
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8.config
+
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettings
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettings
+
+/**
+ * Connection settings for the Elasticsearch 8 sink connector.
+ */
+case class Elastic8Settings(
+ elasticCommonSettings: ElasticCommonSettings,
+ httpBasicAuthUsername: String,
+ httpBasicAuthPassword: String,
+ hostnames: Seq[String],
+ protocol: String,
+ port: Int,
+ prefix: Option[String] = Option.empty,
+) extends ElasticSettings {
+ override def common: ElasticCommonSettings = elasticCommonSettings
+}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8SettingsReader.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8SettingsReader.scala
new file mode 100644
index 000000000..1975b5d57
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8SettingsReader.scala
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8.config
+
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettingsReader
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfig
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettingsReader
+
+import scala.util.Failure
+import scala.util.Success
+import scala.util.Try
+
+object Elastic8SettingsReader extends ElasticSettingsReader[Elastic8Settings, Elastic8ConfigDef] {
+ override def read(configDef: Elastic8ConfigDef, props: Map[String, String]): Either[Throwable, Elastic8Settings] =
+ for {
+ config <- Try(ElasticConfig(configDef, configDef.connectorPrefix, props)).toEither
+ commonSettings <- ElasticCommonSettingsReader.read(config.configDef, props)
+ httpBasicAuthUsername = config.getString(configDef.CLIENT_HTTP_BASIC_AUTH_USERNAME)
+ httpBasicAuthPassword = config.getString(configDef.CLIENT_HTTP_BASIC_AUTH_PASSWORD)
+ hostNames = config.getString(configDef.HOSTS).split(",").toSeq
+ protocol = config.getString(configDef.PROTOCOL)
+ port = config.getInt(configDef.ES_PORT)
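+      // An empty or unset table prefix is treated as no prefix.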
+ prefix = Try(config.getString(configDef.ES_PREFIX)) match {
+ case Success("") => None
+ case Success(configString) => Some(configString)
+ case Failure(_) => None
+ }
+ } yield {
+ Elastic8Settings(
+ commonSettings,
+ httpBasicAuthUsername,
+ httpBasicAuthPassword,
+ hostNames,
+ protocol,
+ port,
+ prefix,
+ )
+ }
+
+}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/writers/Elastic8ClientCreator.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/writers/Elastic8ClientCreator.scala
new file mode 100644
index 000000000..1d509b08c
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/writers/Elastic8ClientCreator.scala
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8.writers
+
+import com.sksamuel.elastic4s.http.JavaClient
+import com.sksamuel.elastic4s.ElasticClient
+import com.sksamuel.elastic4s.ElasticNodeEndpoint
+import com.sksamuel.elastic4s.ElasticProperties
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticClientCreator
+import io.lenses.streamreactor.connect.elastic8.client.Elastic8ClientWrapper
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8Settings
+import org.apache.http.auth.AuthScope
+import org.apache.http.auth.UsernamePasswordCredentials
+import org.apache.http.client.config.RequestConfig.Builder
+import org.apache.http.impl.client.BasicCredentialsProvider
+import org.apache.http.impl.nio.client.HttpAsyncClientBuilder
+
+import scala.util.Try
+
+object Elastic8ClientCreator extends ElasticClientCreator[Elastic8Settings] {
+
+ /**
+   * Construct an Elasticsearch client wrapper.
+   *
+   * @param settings The Elastic8Settings to extract connection details from.
+   * @return An ElasticClientWrapper used to write records from Kafka to Elasticsearch.
+ */
+ override def create(settings: Elastic8Settings): Either[Throwable, ElasticClientWrapper] = {
+ Try {
+
+ def endpoints(
+ hostNames: Seq[String],
+ protocol: String,
+ port: Integer,
+ prefix: Option[String],
+ ): Seq[ElasticNodeEndpoint] =
+ hostNames
+ .map(hostname => ElasticNodeEndpoint(protocol, hostname, port, prefix))
+
+ val elasticProperties =
+ ElasticProperties(endpoints(settings.hostnames, settings.protocol, settings.port, settings.prefix).toIndexedSeq)
+ val javaClient = if (settings.httpBasicAuthUsername.nonEmpty && settings.httpBasicAuthPassword.nonEmpty) {
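+        // Basic-auth credentials were supplied, so register them with the Apache HTTP async client used by elastic4s.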
+ lazy val provider = {
+ val provider = new BasicCredentialsProvider
+ val credentials =
+ new UsernamePasswordCredentials(settings.httpBasicAuthUsername, settings.httpBasicAuthPassword)
+ provider.setCredentials(AuthScope.ANY, credentials)
+ provider
+ }
+
+ JavaClient(
+ elasticProperties,
+ (requestConfigBuilder: Builder) => requestConfigBuilder,
+ (httpClientBuilder: HttpAsyncClientBuilder) => httpClientBuilder.setDefaultCredentialsProvider(provider),
+ )
+
+ } else {
+ JavaClient(
+ elasticProperties,
+ )
+ }
+ new Elastic8ClientWrapper(ElasticClient(javaClient))
+ }
+ }.toEither
+}
diff --git a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/CreateIndexTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/CreateIndexTest.scala
similarity index 91%
rename from kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/CreateIndexTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/CreateIndexTest.scala
index 870ea1689..50fbb4073 100644
--- a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/CreateIndexTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/CreateIndexTest.scala
@@ -13,10 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic6
+package io.lenses.streamreactor.connect.elastic8
import io.lenses.kcql.Kcql
-import io.lenses.streamreactor.connect.elastic6.indexname.CreateIndex
+import io.lenses.streamreactor.connect.elastic.common.indexname.CreateIndex
import org.joda.time.DateTime
import org.joda.time.DateTimeZone
import org.scalatest.matchers.should.Matchers
diff --git a/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticConfigTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticConfigTest.scala
new file mode 100644
index 000000000..795ff4748
--- /dev/null
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticConfigTest.scala
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8
+
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfig
+
+class ElasticConfigTest extends TestBase {
+ import configDef._
+
+ "A ElasticConfig should return the client mode and hostnames" in {
+ val config = ElasticConfig(configDef, configDef.connectorPrefix, getElasticSinkConfigProps())
+ config.getString(HOSTS) shouldBe ELASTIC_SEARCH_HOSTNAMES
+ config.getString(ES_CLUSTER_NAME) shouldBe ES_CLUSTER_NAME_DEFAULT
+ config.getString(KCQL) shouldBe QUERY
+ }
+
+ "A ElasticConfig should return the http basic auth username and password when set" in {
+ val config = ElasticConfig(configDef, configDef.connectorPrefix, getElasticSinkConfigPropsHTTPClient(auth = true))
+ config.getString(CLIENT_HTTP_BASIC_AUTH_USERNAME) shouldBe BASIC_AUTH_USERNAME
+ config.getString(CLIENT_HTTP_BASIC_AUTH_PASSWORD) shouldBe BASIC_AUTH_PASSWORD
+ }
+}
diff --git a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriterCredentialsTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterCredentialsTest.scala
similarity index 53%
rename from kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriterCredentialsTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterCredentialsTest.scala
index 3f8b2be63..d5aafa05c 100644
--- a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticWriterCredentialsTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterCredentialsTest.scala
@@ -13,17 +13,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7
+package io.lenses.streamreactor.connect.elastic8
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfig
-import io.lenses.streamreactor.connect.elastic7.config.ElasticSettings
+import io.lenses.streamreactor.connect.elastic8
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
+import org.scalatest.EitherValues
-class ElasticWriterCredentialsTest extends TestBase {
+class ElasticJsonWriterCredentialsTest extends TestBase with EitherValues {
"A writer should be using HTTP is set with HTTP Basic Auth Credentials" in {
- val config = new ElasticConfig(getElasticSinkConfigPropsHTTPClient(auth = true))
- val settings = ElasticSettings(config)
- settings.httpBasicAuthUsername shouldBe BASIC_AUTH_USERNAME
- settings.httpBasicAuthPassword shouldBe BASIC_AUTH_PASSWORD
+ val configDef = new Elastic8ConfigDef()
+ val settings =
+ elastic8.config.Elastic8SettingsReader.read(configDef, getElasticSinkConfigPropsHTTPClient(auth = true))
+ settings.value.httpBasicAuthUsername shouldBe BASIC_AUTH_USERNAME
+ settings.value.httpBasicAuthPassword shouldBe BASIC_AUTH_PASSWORD
}
}
diff --git a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkConnectorTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkConnectorTest.scala
similarity index 74%
rename from kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkConnectorTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkConnectorTest.scala
index 823002978..ef59bab1a 100644
--- a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkConnectorTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkConnectorTest.scala
@@ -13,27 +13,30 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7
-
-import io.lenses.streamreactor.connect.elastic7.config.ElasticConfigConstants
+package io.lenses.streamreactor.connect.elastic8
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
import scala.jdk.CollectionConverters.ListHasAsScala
import scala.jdk.CollectionConverters.MapHasAsJava
class ElasticSinkConnectorTest extends TestBase {
+
+ override val configDef = new Elastic8ConfigDef()
+ import configDef._
+
"Should start a Elastic Search Connector" in {
//get config
val config = getElasticSinkConfigProps()
//get connector
- val connector = new ElasticSinkConnector()
+ val connector = new Elastic8SinkConnector()
//start with config
connector.start(config.asJava)
//check config
val taskConfigs = connector.taskConfigs(10)
- taskConfigs.asScala.head.get(ElasticConfigConstants.HOSTS) shouldBe ELASTIC_SEARCH_HOSTNAMES
+ taskConfigs.asScala.head.get(HOSTS) shouldBe ELASTIC_SEARCH_HOSTNAMES
taskConfigs.size() shouldBe 10
//check connector
- connector.taskClass() shouldBe classOf[ElasticSinkTask]
+ connector.taskClass() shouldBe classOf[Elastic8SinkTask]
connector.stop()
}
}
diff --git a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkTaskTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkTaskTest.scala
similarity index 94%
rename from kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkTaskTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkTaskTest.scala
index 8e4e20c9f..2700c67df 100644
--- a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/ElasticSinkTaskTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkTaskTest.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7
+package io.lenses.streamreactor.connect.elastic8
import org.apache.kafka.connect.sink.SinkTaskContext
import org.mockito.MockitoSugar
@@ -28,7 +28,7 @@ class ElasticSinkTaskTest extends TestBase with MockitoSugar {
//get config
val config = getElasticSinkConfigProps()
//get task
- val task = new ElasticSinkTask()
+ val task = new Elastic8SinkTask()
//initialise the tasks context
task.initialize(context)
//check version
diff --git a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/TestBase.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/TestBase.scala
similarity index 50%
rename from kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/TestBase.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/TestBase.scala
index aea9978ad..b2cbfc886 100644
--- a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/TestBase.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/TestBase.scala
@@ -13,9 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic6
+package io.lenses.streamreactor.connect.elastic8
-import io.lenses.streamreactor.connect.elastic6.config.ElasticConfigConstants
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
import org.apache.kafka.common.TopicPartition
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
@@ -24,8 +24,13 @@ import org.scalatest.wordspec.AnyWordSpec
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter._
import java.util
+import scala.jdk.CollectionConverters.SetHasAsJava
trait TestBase extends AnyWordSpec with Matchers with BeforeAndAfter {
+
+ val configDef = new Elastic8ConfigDef()
+ import configDef._
+
val ELASTIC_SEARCH_HOSTNAMES = "localhost:9300"
val BASIC_AUTH_USERNAME = "usertest"
val BASIC_AUTH_PASSWORD = "userpassword"
@@ -38,50 +43,47 @@ trait TestBase extends AnyWordSpec with Matchers with BeforeAndAfter {
val UPDATE_QUERY = s"UPSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
val UPDATE_QUERY_SELECTION = s"UPSERT INTO $INDEX SELECT id, string_field FROM $TOPIC PK id"
- protected val PARTITION: Int = 12
- protected val PARTITION2: Int = 13
- protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
- protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
- protected val ASSIGNMENT: util.Set[TopicPartition] = new util.HashSet[TopicPartition]
- //Set topic assignments
- ASSIGNMENT.add(TOPIC_PARTITION)
- ASSIGNMENT.add(TOPIC_PARTITION2)
+ protected val PARTITION: Int = 12
+ protected val PARTITION2: Int = 13
+ protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
+ protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
+ protected val ASSIGNMENT: Set[TopicPartition] = Set(TOPIC_PARTITION, TOPIC_PARTITION2)
//get the assignment of topic partitions for the sinkTask
def getAssignment: util.Set[TopicPartition] =
- ASSIGNMENT
+ ASSIGNMENT.asJava
def getElasticSinkConfigProps(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(QUERY, clusterName)
def getBaseElasticSinkConfigProps(
query: String,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
Map(
- "topics" -> TOPIC,
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> query,
+ "topics" -> TOPIC,
+ HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
+ ES_CLUSTER_NAME -> clusterName,
+ PROTOCOL -> PROTOCOL_DEFAULT,
+ KCQL -> query,
)
def getElasticSinkConfigPropsHTTPClient(
auth: Boolean = false,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
Map(
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> QUERY,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME -> (if (auth) BASIC_AUTH_USERNAME
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT),
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD -> (if (auth) BASIC_AUTH_PASSWORD
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT),
+ HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
+ ES_CLUSTER_NAME -> clusterName,
+ PROTOCOL -> PROTOCOL_DEFAULT,
+ KCQL -> QUERY,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME -> (if (auth) BASIC_AUTH_USERNAME
+ else
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT),
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD -> (if (auth) BASIC_AUTH_PASSWORD
+ else
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT),
)
}
diff --git a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/indexname/ClockFixture.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/ClockFixture.scala
similarity index 93%
rename from kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/indexname/ClockFixture.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/ClockFixture.scala
index 05780ec4a..ddac82b57 100644
--- a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/indexname/ClockFixture.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/ClockFixture.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7.indexname
+package io.lenses.streamreactor.connect.elastic8.indexname
import java.time.Clock
import java.time.Instant
diff --git a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/indexname/CustomIndexNameTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/CustomIndexNameTest.scala
similarity index 85%
rename from kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/indexname/CustomIndexNameTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/CustomIndexNameTest.scala
index 5b829c6fe..90727e956 100644
--- a/kafka-connect-elastic6/src/test/scala/io/lenses/streamreactor/connect/elastic6/indexname/CustomIndexNameTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/CustomIndexNameTest.scala
@@ -13,8 +13,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic6.indexname
+package io.lenses.streamreactor.connect.elastic8.indexname
+import io.lenses.streamreactor.connect.elastic.common.indexname.CustomIndexName
+import io.lenses.streamreactor.connect.elastic.common.indexname.TextFragment
+import io.lenses.streamreactor.connect.elastic.common.indexname.DateTimeFragment
+import io.lenses.streamreactor.connect.elastic.common.indexname.InvalidCustomIndexNameException
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks
diff --git a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/indexname/IndexNameFragmentTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/IndexNameFragmentTest.scala
similarity index 89%
rename from kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/indexname/IndexNameFragmentTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/IndexNameFragmentTest.scala
index 6f897d7f7..02079d5e6 100644
--- a/kafka-connect-elastic7/src/test/scala/io/lenses/streamreactor/connect/elastic7/indexname/IndexNameFragmentTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/IndexNameFragmentTest.scala
@@ -13,8 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic7.indexname
+package io.lenses.streamreactor.connect.elastic8.indexname
+import io.lenses.streamreactor.connect.elastic.common.indexname.DateTimeFragment
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
diff --git a/kafka-connect-influxdb/src/main/scala/io/lenses/streamreactor/connect/influx/InfluxSinkConnector.scala b/kafka-connect-influxdb/src/main/scala/io/lenses/streamreactor/connect/influx/InfluxSinkConnector.scala
index 8a1624395..a9ec2005b 100644
--- a/kafka-connect-influxdb/src/main/scala/io/lenses/streamreactor/connect/influx/InfluxSinkConnector.scala
+++ b/kafka-connect-influxdb/src/main/scala/io/lenses/streamreactor/connect/influx/InfluxSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package io.lenses.streamreactor.connect.influx
+import cats.implicits.toBifunctorOps
import io.lenses.streamreactor.common.config.Helpers
import io.lenses.streamreactor.common.utils.JarManifest
@@ -63,7 +64,7 @@ class InfluxSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting InfluxDb sink task with ${props.toString}.")
- Helpers.checkInputTopics(InfluxConfigConstants.KCQL_CONFIG, props.asScala.toMap)
+ Helpers.checkInputTopics(InfluxConfigConstants.KCQL_CONFIG, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
diff --git a/kafka-connect-jms/src/main/scala/io/lenses/streamreactor/connect/jms/sink/JMSSinkConnector.scala b/kafka-connect-jms/src/main/scala/io/lenses/streamreactor/connect/jms/sink/JMSSinkConnector.scala
index bc95e275a..59435b8bf 100644
--- a/kafka-connect-jms/src/main/scala/io/lenses/streamreactor/connect/jms/sink/JMSSinkConnector.scala
+++ b/kafka-connect-jms/src/main/scala/io/lenses/streamreactor/connect/jms/sink/JMSSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package io.lenses.streamreactor.connect.jms.sink
+import cats.implicits.toBifunctorOps
import io.lenses.streamreactor.common.config.Helpers
import io.lenses.streamreactor.common.utils.JarManifest
import io.lenses.streamreactor.connect.jms.config.JMSConfig
@@ -62,7 +63,7 @@ class JMSSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting Jms sink task.")
- Helpers.checkInputTopics(JMSConfigConstants.KCQL, props.asScala.toMap)
+ Helpers.checkInputTopics(JMSConfigConstants.KCQL, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
diff --git a/kafka-connect-mongodb/src/fun/scala/io/lenses/streamreactor/connect/MongoDBTest.scala b/kafka-connect-mongodb/src/fun/scala/io/lenses/streamreactor/connect/MongoDBTest.scala
index 8ae95d047..f800965b9 100644
--- a/kafka-connect-mongodb/src/fun/scala/io/lenses/streamreactor/connect/MongoDBTest.scala
+++ b/kafka-connect-mongodb/src/fun/scala/io/lenses/streamreactor/connect/MongoDBTest.scala
@@ -1,5 +1,4 @@
package io.lenses.streamreactor.connect
-
import cats.effect.IO
import cats.effect.testing.scalatest.AsyncIOSpec
import _root_.io.confluent.kafka.serializers.KafkaJsonSerializer
diff --git a/kafka-connect-mqtt/src/main/scala/io/lenses/streamreactor/connect/mqtt/sink/MqttSinkConnector.scala b/kafka-connect-mqtt/src/main/scala/io/lenses/streamreactor/connect/mqtt/sink/MqttSinkConnector.scala
index 37753b44a..65e2e6f02 100644
--- a/kafka-connect-mqtt/src/main/scala/io/lenses/streamreactor/connect/mqtt/sink/MqttSinkConnector.scala
+++ b/kafka-connect-mqtt/src/main/scala/io/lenses/streamreactor/connect/mqtt/sink/MqttSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package io.lenses.streamreactor.connect.mqtt.sink
+import cats.implicits.toBifunctorOps
import io.lenses.streamreactor.common.config.Helpers
import io.lenses.streamreactor.common.utils.JarManifest
import io.lenses.streamreactor.connect.mqtt.config.MqttConfigConstants
@@ -40,7 +41,7 @@ class MqttSinkConnector extends SinkConnector with StrictLogging {
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting Mqtt sink connector.")
- Helpers.checkInputTopics(MqttConfigConstants.KCQL_CONFIG, props.asScala.toMap)
+ Helpers.checkInputTopics(MqttConfigConstants.KCQL_CONFIG, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
diff --git a/kafka-connect-opensearch/src/fun/resources/keystore.jks b/kafka-connect-opensearch/src/fun/resources/keystore.jks
new file mode 100644
index 000000000..e69de29bb
diff --git a/kafka-connect-opensearch/src/fun/resources/truststore.jceks b/kafka-connect-opensearch/src/fun/resources/truststore.jceks
new file mode 100644
index 000000000..9db5b5f07
Binary files /dev/null and b/kafka-connect-opensearch/src/fun/resources/truststore.jceks differ
diff --git a/kafka-connect-opensearch/src/fun/resources/truststore.jks b/kafka-connect-opensearch/src/fun/resources/truststore.jks
new file mode 100644
index 000000000..e69de29bb
diff --git a/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchSslTest.scala b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchSslTest.scala
new file mode 100644
index 000000000..553b00f45
--- /dev/null
+++ b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchSslTest.scala
@@ -0,0 +1,49 @@
+package io.lenses.streamreactor.connect
+
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConfigValue
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConnectorConfiguration
+import _root_.io.lenses.streamreactor.connect.testcontainers.scalatest.StreamReactorContainerPerSuite
+import cats.effect.testing.scalatest.AsyncIOSpec
+import org.apache.kafka.common.config.SslConfigs
+import org.scalatest.flatspec.AsyncFlatSpecLike
+import org.scalatest.matchers.should.Matchers
+
+class OpenSearchSslTest
+ extends OpenSearchTestBase("open-ssl")
+ with AsyncFlatSpecLike
+ with AsyncIOSpec
+ with StreamReactorContainerPerSuite
+ with Matchers {
+
+ behavior of "OpenSearch connector with SSL"
+
+ it should "sink records with ssl enabled" ignore {
+
+ runTest(
+ "https",
+ ConnectorConfiguration(
+ "opensearch-sink-ssl",
+ Map(
+ "connector.class" -> ConfigValue("io.lenses.streamreactor.connect.opensearch.OpenSearchSinkConnector"),
+ "tasks.max" -> ConfigValue(1),
+ "topics" -> ConfigValue("orders"),
+ "connect.opensearch.use.http.username" -> ConfigValue("admin"),
+ "connect.opensearch.use.http.password" -> ConfigValue("admin"),
+ "connect.opensearch.protocol" -> ConfigValue("https"),
+ "connect.opensearch.hosts" -> ConfigValue(container.setup.key),
+ "connect.opensearch.port" -> ConfigValue(Integer.valueOf(container.port)),
+ "connect.opensearch.cluster.name" -> ConfigValue(container.setup.key),
+ "connect.opensearch.kcql" -> ConfigValue("INSERT INTO orders SELECT * FROM orders AUTOCREATE"),
+ "connect.progress.enabled" -> ConfigValue(true),
+ SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG -> ConfigValue("JKS"),
+ SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG -> ConfigValue("/security/truststore.jks"),
+ SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG -> ConfigValue("changeIt"),
+ SslConfigs.SSL_KEYSTORE_TYPE_CONFIG -> ConfigValue("JKS"),
+ SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG -> ConfigValue("/security/keystore.jks"),
+ SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG -> ConfigValue("changeIt"),
+ ),
+ ),
+ )
+ }
+
+}
diff --git a/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTest.scala b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTest.scala
new file mode 100644
index 000000000..d5701e7c4
--- /dev/null
+++ b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTest.scala
@@ -0,0 +1,39 @@
+package io.lenses.streamreactor.connect
+
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConfigValue
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConnectorConfiguration
+import _root_.io.lenses.streamreactor.connect.testcontainers.scalatest.StreamReactorContainerPerSuite
+import cats.effect.testing.scalatest.AsyncIOSpec
+import org.scalatest.flatspec.AsyncFlatSpecLike
+import org.scalatest.matchers.should.Matchers
+
+class OpenSearchTest
+ extends OpenSearchTestBase("open")
+ with AsyncFlatSpecLike
+ with AsyncIOSpec
+ with StreamReactorContainerPerSuite
+ with Matchers {
+
+ behavior of "OpenSearch connector"
+
+ it should "sink records" in {
+ runTest(
+ "http",
+ ConnectorConfiguration(
+ "opensearch-sink",
+ Map(
+ "connector.class" -> ConfigValue("io.lenses.streamreactor.connect.opensearch.OpenSearchSinkConnector"),
+ "tasks.max" -> ConfigValue(1),
+ "topics" -> ConfigValue("orders"),
+ "connect.opensearch.protocol" -> ConfigValue("http"),
+ "connect.opensearch.hosts" -> ConfigValue(container.setup.key),
+ "connect.opensearch.port" -> ConfigValue(Integer.valueOf(container.port)),
+ "connect.opensearch.cluster.name" -> ConfigValue(container.setup.key),
+ "connect.opensearch.kcql" -> ConfigValue("INSERT INTO orders SELECT * FROM orders AUTOCREATE"),
+ "connect.progress.enabled" -> ConfigValue(true),
+ ),
+ ),
+ )
+ }
+
+}
diff --git a/kafka-connect-elastic6/src/fun/scala/io/lenses/streamreactor/connect/Elastic6Test.scala b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTestBase.scala
similarity index 52%
rename from kafka-connect-elastic6/src/fun/scala/io/lenses/streamreactor/connect/Elastic6Test.scala
rename to kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTestBase.scala
index 7006122d8..ca3567947 100644
--- a/kafka-connect-elastic6/src/fun/scala/io/lenses/streamreactor/connect/Elastic6Test.scala
+++ b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTestBase.scala
@@ -1,35 +1,52 @@
package io.lenses.streamreactor.connect
-import cats.effect.IO
-import cats.effect.testing.scalatest.AsyncIOSpec
-import com.jayway.jsonpath.JsonPath
+
import _root_.io.confluent.kafka.serializers.KafkaJsonSerializer
import _root_.io.lenses.streamreactor.connect.model.Order
-import _root_.io.lenses.streamreactor.connect.testcontainers.ElasticsearchContainer
-import _root_.io.lenses.streamreactor.connect.testcontainers.SchemaRegistryContainer
-import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConfigValue
import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConnectorConfiguration
import _root_.io.lenses.streamreactor.connect.testcontainers.connect.KafkaConnectClient.createConnector
import _root_.io.lenses.streamreactor.connect.testcontainers.scalatest.StreamReactorContainerPerSuite
+import _root_.io.lenses.streamreactor.connect.testcontainers.ElasticsearchContainer
+import _root_.io.lenses.streamreactor.connect.testcontainers.SchemaRegistryContainer
+import cats.effect.IO
+import cats.effect.testing.scalatest.AsyncIOSpec
+import cats.implicits.catsSyntaxOptionId
+import com.jayway.jsonpath.JsonPath
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.StringSerializer
-import org.scalatest.flatspec.AsyncFlatSpec
+import org.scalatest.Assertion
+import org.scalatest.flatspec.AsyncFlatSpecLike
import org.scalatest.matchers.should.Matchers
import java.net.URI
+import java.net.http.HttpResponse.BodyHandlers
import java.net.http.HttpClient
import java.net.http.HttpRequest
-import java.net.http.HttpResponse.BodyHandlers
+import java.security.SecureRandom
+import java.security.cert.X509Certificate
+import javax.net.ssl.SSLContext
+import javax.net.ssl.X509TrustManager
+import scala.concurrent.Future
+
+abstract class OpenSearchTestBase(containerKey: String)
+ extends AsyncFlatSpecLike
+ with AsyncIOSpec
+ with StreamReactorContainerPerSuite
+ with Matchers {
-class Elastic6Test extends AsyncFlatSpec with AsyncIOSpec with StreamReactorContainerPerSuite with Matchers {
+ override val schemaRegistryContainer: Option[SchemaRegistryContainer] = None
- lazy val container: ElasticsearchContainer = ElasticsearchContainer().withNetwork(network)
+ override def connectorModule: String = "opensearch"
- override val schemaRegistryContainer: Option[SchemaRegistryContainer] = None
+ override def useKeyStore: Boolean = true
- override val connectorModule: String = "elastic6"
+ val container: ElasticsearchContainer = ElasticsearchContainer(containerKey).withNetwork(network)
+
+ override val commonName: Option[String] = container.setup.key.some
override def beforeAll(): Unit = {
+ copyBinds(container.container, "/usr/share/opensearch/config/security/")
container.start()
+
super.beforeAll()
}
@@ -38,41 +55,35 @@ class Elastic6Test extends AsyncFlatSpec with AsyncIOSpec with StreamReactorCont
container.stop()
}
- behavior of "Elastic6 connector"
-
- it should "sink records" in {
- val resources = {
- for {
- producer <- createProducer[String, Order](classOf[StringSerializer], classOf[KafkaJsonSerializer[Order]])
- connector <- createConnector(sinkConfig(), 30L)
- } yield (producer, connector)
- }
+ protected def runTest(protocol: String, configuration: ConnectorConfiguration): Future[Assertion] = {
+ val resources = for {
+ producer <- createProducer[String, Order](classOf[StringSerializer], classOf[KafkaJsonSerializer[Order]])
+ connector <- createConnector(configuration, 60L)
+ } yield (producer, connector)
resources.use {
case (producer, _) =>
IO {
-
// Write records to topic
val order = Order(1, "OP-DAX-P-20150201-95.7", 94.2, 100)
producer.send(new ProducerRecord[String, Order]("orders", order)).get()
producer.flush()
- val client = HttpClient.newHttpClient()
- val request =
- HttpRequest
- .newBuilder()
- .GET()
- .uri(new URI("http://" + container.hostNetwork.httpHostAddress + "/orders/_search/?q=OP-DAX-P-20150201"))
- .build()
+ val client = HttpClient.newBuilder().sslContext(createTrustAllCertsSslContext).build()
+ val request = HttpRequest.newBuilder()
+ .GET().uri(
+ new URI(
+ s"$protocol://${container.hostNetwork.httpHostAddress}/orders/_search/?q=OP-DAX-P-20150201",
+ ),
+ ).build()
eventually {
val response = client.send(request, BodyHandlers.ofString())
val body = response.body
- assert(JsonPath.read[Int](body, "$.hits.total") == 1)
+ assert(JsonPath.read[Int](body, "$.hits.total.value") == 1)
}
client.send(request, BodyHandlers.ofString())
-
}.asserting {
response =>
val body = response.body
@@ -81,23 +92,19 @@ class Elastic6Test extends AsyncFlatSpec with AsyncIOSpec with StreamReactorCont
JsonPath.read[Double](body, "$.hits.hits[0]._source.price") should be(94.2)
JsonPath.read[Int](body, "$.hits.hits[0]._source.qty") should be(100)
}
-
}
}
- private def sinkConfig(): ConnectorConfiguration =
- ConnectorConfiguration(
- "elastic-sink",
- Map(
- "connector.class" -> ConfigValue("io.lenses.streamreactor.connect.elastic6.ElasticSinkConnector"),
- "tasks.max" -> ConfigValue(1),
- "topics" -> ConfigValue("orders"),
- "connect.elastic.protocol" -> ConfigValue("http"),
- "connect.elastic.hosts" -> ConfigValue(container.networkAlias),
- "connect.elastic.port" -> ConfigValue(Integer.valueOf(container.port)),
- "connect.elastic.cluster.name" -> ConfigValue("elasticsearch"),
- "connect.elastic.kcql" -> ConfigValue("INSERT INTO orders SELECT * FROM orders"),
- "connect.progress.enabled" -> ConfigValue("true"),
- ),
- )
+ private def createTrustAllCertsSslContext = {
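+    // Test-only SSLContext that trusts every certificate; never use this outside of tests.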
+ val trustAllCerts = new X509TrustManager {
+ override def checkClientTrusted(chain: Array[X509Certificate], authType: String): Unit = ()
+
+ override def checkServerTrusted(chain: Array[X509Certificate], authType: String): Unit = ()
+
+ override def getAcceptedIssuers: Array[X509Certificate] = null
+ }
+ val sslContext = SSLContext.getInstance("TLS")
+ sslContext.init(null, Array(trustAllCerts), new SecureRandom())
+ sslContext
+ }
}
diff --git a/kafka-connect-elastic7/src/main/resources/elastic-ascii.txt b/kafka-connect-opensearch/src/main/resources/opensearch-ascii.txt
similarity index 70%
rename from kafka-connect-elastic7/src/main/resources/elastic-ascii.txt
rename to kafka-connect-opensearch/src/main/resources/opensearch-ascii.txt
index 4592f3ae9..73b4edfc4 100644
--- a/kafka-connect-elastic7/src/main/resources/elastic-ascii.txt
+++ b/kafka-connect-opensearch/src/main/resources/opensearch-ascii.txt
@@ -10,11 +10,15 @@
█████████ ████████████████████████████████████████████████████████████████████
█████████ ▄████████████████████████████████████████████████████████████████████
████████████████████████████████████████████████████████████████████████████████
- ________ __ _ _____ _ __
- / ____/ /___ ______/ /_(_)____/ ___/(_)___ / /__
- / __/ / / __ `/ ___/ __/ / ___/\__ \/ / __ \/ //_/
- / /___/ / /_/ (__ ) /_/ / /__ ___/ / / / / / ,<
- /_____/_/\__,_/____/\__/_/\___//____/_/_/ /_/_/|_|
-
-
-by Andrew Stevenson
+ ________ _________ .__
+ \_____ \ ______ ____ ____ / _____/ ____ _____ _______ ____ | |__
+ / | \\____ \_/ __ \ / \ \_____ \_/ __ \\__ \\_ __ \_/ ___\| | \
+ / | \ |_> > ___/| | \/ \ ___/ / __ \| | \/\ \___| Y \
+ \_______ / __/ \___ >___| /_______ /\___ >____ /__| \___ >___| /
+ \/|__| \/ \/ \/ \/ \/ \/ \/
+ _________.__ __
+ / _____/|__| ____ | | __
+ \_____ \ | |/ \| |/ /
+ / \| | | \ <
+ /_______ /|__|___| /__|_ \
+ \/ \/ \/
\ No newline at end of file
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkConnector.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkConnector.scala
new file mode 100644
index 000000000..d11e8a7a4
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkConnector.scala
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch
+
+import io.lenses.streamreactor.connect.elastic.common.ElasticSinkConnector
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchConfigDef
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchSettings
+
+class OpenSearchSinkConnector
+ extends ElasticSinkConnector[
+ OpenSearchSettings,
+ OpenSearchConfigDef,
+ OpenSearchSinkTask,
+ ](classOf[OpenSearchSinkTask], new OpenSearchConfigDef) {}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkTask.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkTask.scala
new file mode 100644
index 000000000..ba5c85109
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkTask.scala
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch
+
+import io.lenses.streamreactor.connect.elastic.common.ElasticSinkTask
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchConfigDef
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchSettings
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchSettingsReader
+import io.lenses.streamreactor.connect.opensearch.writers.OpenSearchClientCreator
+
+class OpenSearchSinkTask
+ extends ElasticSinkTask[OpenSearchSettings, OpenSearchConfigDef](
+ OpenSearchSettingsReader,
+ OpenSearchClientCreator,
+ new OpenSearchConfigDef(),
+ "/opensearch-ascii.txt",
+ ) {}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapper.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapper.scala
new file mode 100644
index 000000000..9390ffc61
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapper.scala
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.client
+
+import cats.effect.IO
+import com.typesafe.scalalogging.LazyLogging
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.client.InsertRequest
+import io.lenses.streamreactor.connect.elastic.common.client.Request
+import io.lenses.streamreactor.connect.elastic.common.client.UpsertRequest
+import org.opensearch.client.opensearch.{ OpenSearchClient => UnderlyingOpenSearchClient }
+import org.opensearch.client.opensearch._types.Refresh
+import org.opensearch.client.opensearch.core.BulkRequest
+import org.opensearch.client.opensearch.core.bulk.BulkOperation
+import org.opensearch.client.opensearch.core.bulk.IndexOperation
+import org.opensearch.client.opensearch.core.bulk.UpdateOperation
+import org.opensearch.client.opensearch.indices.CreateIndexRequest
+import org.opensearch.client.transport.OpenSearchTransport
+
+import scala.jdk.CollectionConverters.SeqHasAsJava
+
+class OpenSearchClientWrapper(transport: OpenSearchTransport, client: UnderlyingOpenSearchClient)
+ extends ElasticClientWrapper
+ with LazyLogging {
+
+ override def createIndex(indexName: String): IO[Unit] =
+ IO {
+ val createIndexRequest = new CreateIndexRequest.Builder()
+ .index(indexName)
+ .build()
+ client.indices().create(createIndexRequest)
+ } *> IO.unit
+
+ override def close(): IO[Unit] = IO {
+ transport.close()
+ ()
+ }.recover { t: Throwable =>
+ logger.error("Error during OpenSearch client shutdown", t)
+ ()
+ }
+
+ override def execute(reqs: Seq[Request]): IO[Unit] =
+ IO {
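+      // Map the connector's generic requests onto OpenSearch bulk operations: index for inserts, update with docAsUpsert for upserts.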
+ val bulkOps: List[BulkOperation] = reqs.map {
+ case InsertRequest(index, id, json, pipeline) =>
+ new BulkOperation.Builder().index(
+ new IndexOperation.Builder().index(index).id(id).document(json).pipeline(pipeline).build(),
+ ).build()
+ case UpsertRequest(index, id, json) =>
+ new BulkOperation.Builder().update(
+ new UpdateOperation.Builder().index(index).id(id).document(json).docAsUpsert(true).build(),
+ ).build()
+ }.toList
+
+ val bulkReq = new BulkRequest.Builder().refresh(Refresh.True).operations(bulkOps.asJava).build()
+ client.bulk(bulkReq)
+ ()
+ }
+}
diff --git a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/CreateIndex.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/AuthMode.scala
similarity index 61%
rename from kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/CreateIndex.scala
rename to kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/AuthMode.scala
index 34729a36e..d0879f4f2 100644
--- a/kafka-connect-elastic6/src/main/scala/io/lenses/streamreactor/connect/elastic6/indexname/CreateIndex.scala
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/AuthMode.scala
@@ -13,16 +13,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.lenses.streamreactor.connect.elastic6.indexname
-
-import io.lenses.kcql.Kcql
-
-/**
- * Creates the index for the given KCQL configuration.
- */
-object CreateIndex {
- def getIndexName(kcql: Kcql): String =
- Option(kcql.getIndexSuffix).fold(kcql.getTarget) { indexNameSuffix =>
- s"${kcql.getTarget}${CustomIndexName.parseIndexName(indexNameSuffix)}"
- }
+package io.lenses.streamreactor.connect.opensearch.config
+
+import enumeratum.Enum
+import enumeratum.EnumEntry
+
+import scala.collection.immutable
+
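+// Authentication mode options for the OpenSearch connection.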
+object AuthMode extends Enum[AuthMode] {
+
+ override val values: immutable.IndexedSeq[AuthMode] = findValues
+
+ case object Credentials extends AuthMode
+
+ case object Default extends AuthMode
+
}
+
+sealed trait AuthMode extends EnumEntry
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/CredentialPair.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/CredentialPair.scala
new file mode 100644
index 000000000..6af15fa2b
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/CredentialPair.scala
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+case class CredentialPair(username: String, password: String)
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchConfigDef.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchConfigDef.scala
new file mode 100644
index 000000000..5d3319655
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchConfigDef.scala
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+import io.lenses.streamreactor.common.config.base.const.TraitConfigConst._
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfigDef
+import org.apache.kafka.common.config.ConfigDef
+import org.apache.kafka.common.config.ConfigDef.Importance
+import org.apache.kafka.common.config.ConfigDef.Type
+
+class OpenSearchConfigDef extends ElasticConfigDef("connect.opensearch") {
+
+ val TRANSPORT = s"$connectorPrefix.type"
+  val TRANSPORT_DOC = "OpenSearch Type (AWS, AWS_SERVERLESS, HTTP)"
+ val TRANSPORT_DEFAULT = "https"
+
+ val AWS_REGION: String = s"$connectorPrefix.aws.region"
+ val AWS_ACCESS_KEY: String = s"$connectorPrefix.aws.access.key"
+ val AWS_SECRET_KEY: String = s"$connectorPrefix.aws.secret.key"
+ val AUTH_MODE: String = s"$connectorPrefix.aws.auth.mode"
+
+ val PROTOCOL = s"$connectorPrefix.protocol"
+  val PROTOCOL_DOC = "URL protocol (http, https); only applies to non-AWS connections"
+ val PROTOCOL_DEFAULT = "http"
+
+ val HOSTS = s"$connectorPrefix.$CONNECTION_HOSTS_SUFFIX"
+  val HOSTS_DOC = "List of hostnames for the OpenSearch cluster nodes, excluding protocol or port."
+ val HOSTS_DEFAULT = "localhost"
+
+ val ES_PORT = s"$connectorPrefix.$CONNECTION_PORT_SUFFIX"
+  val ES_PORT_DOC = "Port on which the OpenSearch node listens"
+ val ES_PORT_DEFAULT = 9300
+
+ val ES_PREFIX = s"$connectorPrefix.tableprefix"
+ val ES_PREFIX_DOC = "Table prefix (optional)"
+ val ES_PREFIX_DEFAULT = ""
+
+ val ES_CLUSTER_NAME = s"$connectorPrefix.$CLUSTER_NAME_SUFFIX"
+ val ES_CLUSTER_NAME_DOC = "Name of the elastic search cluster, used in local mode for setting the connection"
+ val ES_CLUSTER_NAME_DEFAULT = "elasticsearch"
+
+ val CLIENT_HTTP_BASIC_AUTH_USERNAME = s"$connectorPrefix.use.http.username"
+ val CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT = ""
+  val CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC = "Username, if HTTP Basic Auth is required. Default is empty (disabled)."
+
+ val CLIENT_HTTP_BASIC_AUTH_PASSWORD = s"$connectorPrefix.use.http.password"
+ val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT = ""
+  val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC = "Password, if HTTP Basic Auth is required. Default is empty (disabled)."
+
+ override def configDef: ConfigDef = super.configDef
+ .define(
+ TRANSPORT,
+ Type.STRING,
+ TRANSPORT_DEFAULT,
+ Importance.HIGH,
+ TRANSPORT_DOC,
+ )
+ .define(
+ PROTOCOL,
+ Type.STRING,
+ PROTOCOL_DEFAULT,
+ Importance.LOW,
+ PROTOCOL_DOC,
+ "Connection",
+ 1,
+ ConfigDef.Width.MEDIUM,
+ PROTOCOL,
+ )
+ .define(
+ HOSTS,
+ Type.STRING,
+ HOSTS_DEFAULT,
+ Importance.HIGH,
+ HOSTS_DOC,
+ "Connection",
+ 2,
+ ConfigDef.Width.MEDIUM,
+ HOSTS,
+ )
+ .define(
+ ES_PORT,
+ Type.INT,
+ ES_PORT_DEFAULT,
+ Importance.HIGH,
+ ES_PORT_DOC,
+ "Connection",
+ 3,
+ ConfigDef.Width.MEDIUM,
+      ES_PORT,
+ )
+ .define(
+ ES_PREFIX,
+ Type.STRING,
+ ES_PREFIX_DEFAULT,
+ Importance.HIGH,
+ ES_PREFIX_DOC,
+ "Connection",
+ 4,
+ ConfigDef.Width.MEDIUM,
+      ES_PREFIX,
+ )
+ .define(
+ ES_CLUSTER_NAME,
+ Type.STRING,
+ ES_CLUSTER_NAME_DEFAULT,
+ Importance.HIGH,
+ ES_CLUSTER_NAME_DOC,
+ "Connection",
+ 5,
+ ConfigDef.Width.MEDIUM,
+ ES_CLUSTER_NAME,
+ )
+ .define(
+ CLIENT_HTTP_BASIC_AUTH_USERNAME,
+ Type.STRING,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
+ Importance.LOW,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC,
+ "Connection",
+ 8,
+ ConfigDef.Width.MEDIUM,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME,
+ )
+ .define(
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD,
+ Type.STRING,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT,
+ Importance.LOW,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC,
+ "Connection",
+ 9,
+ ConfigDef.Width.MEDIUM,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD,
+ )
+ .define(
+ AWS_REGION,
+ Type.STRING,
+ "",
+ Importance.HIGH,
+ "AWS region",
+ )
+ .define(
+ AWS_ACCESS_KEY,
+ Type.PASSWORD,
+ "",
+ Importance.HIGH,
+ "AWS access key",
+ )
+ .define(
+ AWS_SECRET_KEY,
+ Type.PASSWORD,
+ "",
+ Importance.HIGH,
+ "AWS password key",
+ )
+ .define(
+ AUTH_MODE,
+ Type.STRING,
+ "",
+ Importance.HIGH,
+ "Authenticate mode, 'credentials' or 'default'",
+ )
+
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettings.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettings.scala
new file mode 100644
index 000000000..d51e01fca
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettings.scala
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettings
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettings
+import io.lenses.streamreactor.connect.opensearch.config.connection.OpenSeearchConnectionSettings
+
+case class OpenSearchSettings(
+ elasticCommonSettings: ElasticCommonSettings,
+ connection: OpenSeearchConnectionSettings,
+) extends ElasticSettings {
+ override def common: ElasticCommonSettings = elasticCommonSettings
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettingsReader.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettingsReader.scala
new file mode 100644
index 000000000..3f7e2e543
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettingsReader.scala
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+import cats.implicits.catsSyntaxEitherId
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettingsReader
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfig
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettingsReader
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchTransport.Aws
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchTransport.AwsServerless
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchTransport.Http
+import io.lenses.streamreactor.connect.opensearch.config.connection.AWSConnectionSettings
+import io.lenses.streamreactor.connect.opensearch.config.connection.RestConnectionSettings
+import io.lenses.streamreactor.connect.security.StoresInfo
+import org.apache.kafka.connect.errors.ConnectException
+
+import scala.util.Try
+
+object OpenSearchSettingsReader extends ElasticSettingsReader[OpenSearchSettings, OpenSearchConfigDef] {
+ override def read(configDef: OpenSearchConfigDef, props: Map[String, String]): Either[Throwable, OpenSearchSettings] =
+ for {
+ config <- Try(ElasticConfig(configDef, configDef.connectorPrefix, props)).toEither
+ commonSettings <- ElasticCommonSettingsReader.read(config.configDef, props)
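+      // an unset or unrecognised transport value falls back to plain HTTP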
+ transportType = Option(config.getString(configDef.TRANSPORT)).map(_.trim).filterNot(_.isEmpty).flatMap(
+ OpenSearchTransport.withNameInsensitiveOption,
+ ).getOrElse(OpenSearchTransport.Http)
+ hostNames = config.getString(configDef.HOSTS).split(",").toSeq
+
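+      // AWS transports sign requests against a single endpoint; only the HTTP transport accepts a comma-separated host list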
+ connectionSettings <- transportType match {
+ case Http =>
+ createHttpConnectionSettings(configDef, config, hostNames).asRight
+ case Aws | AwsServerless if hostNames.size == 1 =>
+ createAwsConnectionSettings(configDef, config, transportType, hostNames).asRight
+ case _ => new ConnectException("Multiple hosts not supported for AWS").asLeft
+ }
+
+ } yield {
+ OpenSearchSettings(
+ commonSettings,
+ connectionSettings,
+ )
+ }
+
+ private def createHttpConnectionSettings(
+ configDef: OpenSearchConfigDef,
+ config: ElasticConfig,
+ hostNames: Seq[String],
+ ) = {
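+    // basic auth is only configured when both username and password are non-empty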
+ val credentialPair = for {
+ httpBasicAuthUsername <- Option(config.getString(configDef.CLIENT_HTTP_BASIC_AUTH_USERNAME)).filterNot(
+ _.trim.isEmpty,
+ )
+ httpBasicAuthPassword <- Option(config.getString(configDef.CLIENT_HTTP_BASIC_AUTH_PASSWORD)).filterNot(
+ _.trim.isEmpty,
+ )
+ } yield {
+ CredentialPair(httpBasicAuthUsername, httpBasicAuthPassword)
+ }
+
+ val protocol = config.getString(configDef.PROTOCOL)
+ val port = config.getInt(configDef.ES_PORT)
+ val prefix = Option(config.getString(configDef.ES_PREFIX)).filterNot(_ == "")
+ val storesInfo = StoresInfo(config)
+ RestConnectionSettings(
+ hostNames,
+ protocol,
+ port,
+ prefix,
+ credentialPair,
+ storesInfo,
+ )
+ }
+
+ private def createAwsConnectionSettings(
+ configDef: OpenSearchConfigDef,
+ config: ElasticConfig,
+ transportType: OpenSearchTransport,
+ hostNames: Seq[String],
+ ) =
+ AWSConnectionSettings(
+ hostNames.head,
+ config.getString(configDef.AWS_REGION).trim,
+ Option(config.getString(configDef.AWS_ACCESS_KEY)).map(_.trim),
+ Option(config.getString(configDef.AWS_SECRET_KEY)).map(_.trim),
+ Option(config.getString(configDef.AUTH_MODE)).map(_.trim).flatMap(
+ AuthMode.withNameInsensitiveOption,
+ ).getOrElse(AuthMode.Default),
+ serverless = transportType == OpenSearchTransport.AwsServerless,
+ )
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchTransport.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchTransport.scala
new file mode 100644
index 000000000..280f76d45
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchTransport.scala
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+import enumeratum.Enum
+import enumeratum.EnumEntry
+
+import scala.collection.immutable
+
+object OpenSearchTransport extends Enum[OpenSearchTransport] {
+
+ override val values: immutable.IndexedSeq[OpenSearchTransport] = findValues
+
+ case object Aws extends OpenSearchTransport
+
+ case object AwsServerless extends OpenSearchTransport
+
+ case object Http extends OpenSearchTransport
+
+}
+
+sealed trait OpenSearchTransport extends EnumEntry
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettings.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettings.scala
new file mode 100644
index 000000000..d703b7e7a
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettings.scala
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config.connection
+
+import cats.implicits.catsSyntaxEitherId
+import io.lenses.streamreactor.connect.opensearch.config.AuthMode
+import io.lenses.streamreactor.connect.opensearch.config.connection.OpenSeearchConnectionSettings.defaultCredentialsProvider
+import org.apache.kafka.connect.errors.ConnectException
+import org.opensearch.client.transport.OpenSearchTransport
+import org.opensearch.client.transport.aws.AwsSdk2Transport
+import org.opensearch.client.transport.aws.AwsSdk2TransportOptions
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider
+import software.amazon.awssdk.http.apache.ApacheHttpClient
+import software.amazon.awssdk.regions.Region
+
+import scala.util.Try
+
+case class AWSConnectionSettings(
+ endpoint: String,
+ region: String,
+ accessKey: Option[String],
+ secretKey: Option[String],
+ authMode: AuthMode,
+ serverless: Boolean,
+) extends OpenSeearchConnectionSettings {
+ override def toTransport: Either[Throwable, OpenSearchTransport] =
+ for {
+ creds <- credentialsProvider()
+ tOpts <- Try {
+ val transportOptions = AwsSdk2TransportOptions.builder().setCredentials(creds).build()
+
+ val httpClient = ApacheHttpClient.builder().build()
+ new AwsSdk2Transport(
+ httpClient,
+ endpoint, // OpenSearch endpoint, without https://
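+          // signing service name: "aoss" for OpenSearch Serverless, "es" for managed OpenSearch domains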
+ if (serverless) "aoss" else "es",
+ Region.of(region),
+ transportOptions,
+ )
+ }.toEither
+ } yield tOpts
+
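+  // Credentials mode requires both access and secret keys; Default mode uses the AWS default credentials provider chain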
+ private def credentialsProvider(): Either[Throwable, AwsCredentialsProvider] =
+ (authMode, accessKey.zip(secretKey)) match {
+ case (AuthMode.Credentials, Some((access, secret))) =>
+ StaticCredentialsProvider.create(AwsBasicCredentials.create(access, secret)).asRight
+ case (AuthMode.Credentials, None) => new ConnectException("No credentials specified").asLeft
+ case (AuthMode.Default, _) => defaultCredentialsProvider.asRight
+ }
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/OpenSeearchConnectionSettings.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/OpenSeearchConnectionSettings.scala
new file mode 100644
index 000000000..62cf01c51
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/OpenSeearchConnectionSettings.scala
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config.connection
+
+import org.opensearch.client.transport.OpenSearchTransport
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider
+import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider
+
+object OpenSeearchConnectionSettings {
+
+ val defaultCredentialsProvider: AwsCredentialsProvider = DefaultCredentialsProvider.create()
+
+}
+
+trait OpenSeearchConnectionSettings {
+ def toTransport: Either[Throwable, OpenSearchTransport]
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/RestConnectionSettings.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/RestConnectionSettings.scala
new file mode 100644
index 000000000..2b50acd0a
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/RestConnectionSettings.scala
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config.connection
+
+import io.lenses.streamreactor.connect.opensearch.config.CredentialPair
+import io.lenses.streamreactor.connect.security.StoresInfo
+import org.apache.http.HttpHost
+import org.apache.http.auth.AuthScope
+import org.apache.http.auth.UsernamePasswordCredentials
+import org.apache.http.impl.client.BasicCredentialsProvider
+import org.apache.http.impl.nio.client.HttpAsyncClientBuilder
+import org.opensearch.client.RestClient
+import org.opensearch.client.json.jackson.JacksonJsonpMapper
+import org.opensearch.client.transport.OpenSearchTransport
+import org.opensearch.client.transport.rest_client.RestClientTransport
+
+import scala.util.Try
+
+case class RestConnectionSettings(
+ hostnames: Seq[String],
+ protocol: String,
+ port: Int,
+ prefix: Option[String],
+ httpBasicCreds: Option[CredentialPair],
+ storesInfo: StoresInfo,
+) extends OpenSeearchConnectionSettings {
+ override def toTransport: Either[Throwable, OpenSearchTransport] =
+ for {
+ restClient <- Try(createAndConfigureRestClient()).toEither
+ transport = new RestClientTransport(restClient, new JacksonJsonpMapper())
+ } yield transport
+
+ private def hostnameToHttpHost(hostname: String): HttpHost =
+ new HttpHost(prefix.map(hostname +).getOrElse(hostname), port, protocol)
+
+ private def createAndConfigureRestClient(): RestClient = {
+
+ val builder = RestClient.builder(
+ hostnames.map(hostnameToHttpHost): _*,
+ )
+
+ val sslContext = storesInfo.toSslContext
+ val credsProvider = httpBasicCreds.map(creds => createCredsProvider(creds.username, creds.password))
+
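+    // only customise the HTTP client when TLS or basic-auth settings are present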
+ if (sslContext.nonEmpty || credsProvider.nonEmpty) {
+ builder.setHttpClientConfigCallback {
+ (httpClientBuilder: HttpAsyncClientBuilder) =>
+ credsProvider.foreach {
+ httpClientBuilder.setDefaultCredentialsProvider
+ }
+ sslContext.foreach {
+ httpClientBuilder.setSSLContext
+ }
+ httpClientBuilder
+ }
+ }
+ builder.build()
+ }
+
+ private def createCredsProvider(username: String, password: String) = {
+ val provider = new BasicCredentialsProvider()
+ provider.setCredentials(
+ AuthScope.ANY,
+ new UsernamePasswordCredentials(username, password),
+ )
+ provider
+ }
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreator.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreator.scala
new file mode 100644
index 000000000..f88ee9943
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreator.scala
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.writers
+
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticClientCreator
+import io.lenses.streamreactor.connect.opensearch.client.OpenSearchClientWrapper
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchSettings
+import org.opensearch.client.opensearch.OpenSearchClient
+
+object OpenSearchClientCreator extends ElasticClientCreator[OpenSearchSettings] {
+
+ /**
+ * Construct a JSONWriter.
+ *
+ * @param config An elasticSinkConfig to extract settings from.
+ * @return An ElasticJsonWriter to write records from Kafka to ElasticSearch.
+ */
+ override def create(config: OpenSearchSettings): Either[Throwable, OpenSearchClientWrapper] =
+ for {
+ transport <- config.connection.toTransport
+ } yield new OpenSearchClientWrapper(transport, new OpenSearchClient(transport))
+
+}
diff --git a/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapperTest.scala b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapperTest.scala
new file mode 100644
index 000000000..c47974fab
--- /dev/null
+++ b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapperTest.scala
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.client
+
+import cats.effect.testing.scalatest.AsyncIOSpec
+import com.fasterxml.jackson.databind.node.TextNode
+import io.lenses.streamreactor.connect.elastic.common.client.InsertRequest
+import io.lenses.streamreactor.connect.elastic.common.client.UpsertRequest
+import org.mockito.ArgumentMatchers.any
+import org.mockito.ArgumentMatchers.argThat
+import org.mockito.Answers
+import org.mockito.MockitoSugar
+import org.opensearch.client.opensearch.OpenSearchClient
+import org.opensearch.client.opensearch.core.bulk.BulkResponseItem
+import org.opensearch.client.opensearch.core.BulkRequest
+import org.opensearch.client.opensearch.core.BulkResponse
+import org.opensearch.client.opensearch.indices.CreateIndexRequest
+import org.opensearch.client.opensearch.indices.CreateIndexResponse
+import org.opensearch.client.transport.OpenSearchTransport
+import org.scalatest.funsuite.AsyncFunSuite
+import org.scalatest.matchers.should.Matchers
+
+import scala.jdk.CollectionConverters.SeqHasAsJava
+
+class OpenSearchClientWrapperTest extends AsyncFunSuite with AsyncIOSpec with Matchers with MockitoSugar {
+
+ test("createIndex should create an index successfully") {
+ val mockClient = mock[OpenSearchClient](Answers.RETURNS_DEEP_STUBS)
+ val mockTransport = mock[OpenSearchTransport]
+
+ val clientWrapper = new OpenSearchClientWrapper(mockTransport, mockClient)
+ val indexName = "test_index"
+ when(mockClient.indices().create(any[CreateIndexRequest])).thenReturn(
+ new CreateIndexResponse.Builder().index(indexName).shardsAcknowledged(true).build(),
+ )
+
+ clientWrapper.createIndex(indexName).asserting {
+ result =>
+ verify(mockClient.indices()).create(argThat { request: CreateIndexRequest =>
+ request.index() == indexName
+ })
+ result shouldBe ()
+ }
+ }
+
+ test("close should close the client successfully") {
+ val mockClient = mock[OpenSearchClient]
+ val mockTransport = mock[OpenSearchTransport]
+
+ val clientWrapper = new OpenSearchClientWrapper(mockTransport, mockClient)
+ clientWrapper.close().asserting {
+ result =>
+ verify(mockTransport).close()
+ result shouldBe ()
+ }
+
+ }
+
+ test("execute should execute bulk requests successfully") {
+ val mockClient = mock[OpenSearchClient]
+ val mockTransport = mock[OpenSearchTransport]
+
+ val clientWrapper = new OpenSearchClientWrapper(mockTransport, mockClient)
+
+ val requests = Seq(
+ InsertRequest("index1", "id1", new TextNode("no"), "pipe"),
+ UpsertRequest("index2", "id2", new TextNode("no")),
+ )
+
+ when(mockClient.bulk(any[BulkRequest])).thenReturn(
+ new BulkResponse.Builder().errors(false).items(List[BulkResponseItem]().asJava).took(200L).build(),
+ )
+
+ clientWrapper.execute(requests).asserting {
+ result =>
+ verify(mockClient).bulk(any[BulkRequest])
+ result shouldBe ()
+ }
+
+ }
+}
diff --git a/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettingsTest.scala b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettingsTest.scala
new file mode 100644
index 000000000..8bf18644d
--- /dev/null
+++ b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettingsTest.scala
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config.connection
+
+import io.lenses.streamreactor.connect.opensearch.config.AuthMode
+import org.apache.kafka.connect.errors.ConnectException
+import org.mockito.MockitoSugar
+import org.opensearch.client.transport.aws.AwsSdk2Transport
+import org.scalatest.EitherValues
+import org.scalatest.funsuite.AnyFunSuite
+import org.scalatest.matchers.should.Matchers
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider
+
+class AWSConnectionSettingsTest extends AnyFunSuite with Matchers with MockitoSugar with EitherValues {
+
+ test("toTransport should return a valid OpenSearchTransport when using credentials") {
+ val endpoint = "test-endpoint"
+ val region = "us-east-1"
+ val accessKey = Some("access-key")
+ val secretKey = Some("secret-key")
+ val authMode = AuthMode.Credentials
+ val serverless = false
+
+ val settings = AWSConnectionSettings(endpoint, region, accessKey, secretKey, authMode, serverless)
+
+    settings.toTransport.value shouldBe an[AwsSdk2Transport]
+
+ }
+
+ test("toTransport should return an error when using credentials but they are not provided") {
+ val endpoint = "test-endpoint"
+ val region = "us-east-1"
+ val accessKey = None
+ val secretKey = None
+ val authMode = AuthMode.Credentials
+ val serverless = false
+
+ val settings = AWSConnectionSettings(endpoint, region, accessKey, secretKey, authMode, serverless)
+
+ val result = settings.toTransport
+
+ result shouldBe a[Left[_, _]]
+ result.left.value shouldBe a[ConnectException]
+ }
+
+ test("toTransport should return an error when an exception occurs during transport creation") {
+ val endpoint = "test-endpoint"
+ val region = ""
+ val accessKey = Some("access-key")
+ val secretKey = Some("secret-key")
+ val authMode = AuthMode.Credentials
+ val serverless = false
+
+ val settings = AWSConnectionSettings(endpoint, region, accessKey, secretKey, authMode, serverless)
+
+ val mockCredentialsProvider = mock[AwsCredentialsProvider]
+
+ when(mockCredentialsProvider.resolveCredentials()).thenReturn(AwsBasicCredentials.create("access-key",
+ "secret-key",
+ ))
+
+ val result = settings.toTransport
+
+ result shouldBe a[Left[_, _]]
+ result.left.value shouldBe a[IllegalArgumentException]
+ }
+}
diff --git a/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreatorTest.scala b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreatorTest.scala
new file mode 100644
index 000000000..246d3f6e5
--- /dev/null
+++ b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreatorTest.scala
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.writers
+
+import io.lenses.streamreactor.connect.opensearch.client.OpenSearchClientWrapper
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchSettings
+import org.mockito.Answers
+import org.mockito.MockitoSugar
+import org.opensearch.client.transport.OpenSearchTransport
+import org.scalatest.EitherValues
+import org.scalatest.funsuite.AnyFunSuite
+import org.scalatest.matchers.should.Matchers
+
+class OpenSearchClientCreatorTest extends AnyFunSuite with Matchers with MockitoSugar with EitherValues {
+
+ test("create should return an OpenSearchClientWrapper with a valid OpenSearchClient") {
+
+ val mockSettings = mock[OpenSearchSettings](Answers.RETURNS_DEEP_STUBS)
+ val mockTransport = mock[OpenSearchTransport]
+ when(mockSettings.connection.toTransport).thenReturn(Right(mockTransport))
+
+ OpenSearchClientCreator.create(mockSettings).value should be(a[OpenSearchClientWrapper])
+ verify(mockSettings.connection).toTransport
+ }
+
+ test("create should return an error if creating a transport fails") {
+ val ex = new RuntimeException("Transport error")
+ val mockSettings = mock[OpenSearchSettings](Answers.RETURNS_DEEP_STUBS)
+ when(mockSettings.connection.toTransport).thenReturn(Left(ex))
+
+ OpenSearchClientCreator.create(mockSettings).left.value should be(ex)
+ verify(mockSettings.connection).toTransport
+ }
+}
diff --git a/kafka-connect-sql-common/src/main/scala/io/lenses/streamreactor/common/schemas/ConverterUtil.scala b/kafka-connect-sql-common/src/main/scala/io/lenses/streamreactor/common/schemas/ConverterUtil.scala
index d012be64c..d3cc33eb5 100644
--- a/kafka-connect-sql-common/src/main/scala/io/lenses/streamreactor/common/schemas/ConverterUtil.scala
+++ b/kafka-connect-sql-common/src/main/scala/io/lenses/streamreactor/common/schemas/ConverterUtil.scala
@@ -16,7 +16,6 @@
package io.lenses.streamreactor.common.schemas
import com.fasterxml.jackson.databind.JsonNode
-import io.confluent.connect.avro.AvroConverter
import io.confluent.connect.avro.AvroData
import io.lenses.streamreactor.common.schemas.StructHelper._
import io.lenses.streamreactor.connect.json.SimpleJsonConverter
@@ -24,7 +23,6 @@ import org.apache.avro.generic.GenericRecord
import org.apache.kafka.connect.connector.ConnectRecord
import org.apache.kafka.connect.data._
import org.apache.kafka.connect.errors.ConnectException
-import org.apache.kafka.connect.json.JsonDeserializer
import org.apache.kafka.connect.sink.SinkRecord
import org.json4s._
import org.json4s.jackson.JsonMethods._
@@ -45,8 +43,6 @@ trait ConverterUtil {
type avroSchema = org.apache.avro.Schema
lazy val simpleJsonConverter = new SimpleJsonConverter()
- lazy val deserializer = new JsonDeserializer()
- lazy val avroConverter = new AvroConverter()
lazy val avroData = new AvroData(100)
//for converting json to
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index e52862e75..aeef89d6d 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -146,18 +146,17 @@ object Dependencies {
val elastic4sVersion, elasticSearchVersion, jnaVersion: String
}
- object Elastic6Versions extends ElasticVersions() {
- override val elastic4sVersion: String = "6.7.8"
- override val elasticSearchVersion: String = "6.8.23"
- override val jnaVersion: String = "3.0.9"
+ object Elastic8Versions extends ElasticVersions {
+ override val elastic4sVersion: String = "8.9.2"
+ override val elasticSearchVersion: String = "8.10.1"
+ override val jnaVersion: String = "4.5.1"
}
- object Elastic7Versions extends ElasticVersions {
- override val elastic4sVersion: String = "7.17.2"
- override val elasticSearchVersion: String = "7.17.2"
+ object OpenSearchVersions extends ElasticVersions {
+ override val elastic4sVersion: String = "8.9.2"
+ override val elasticSearchVersion: String = "8.10.1"
override val jnaVersion: String = "4.5.1"
}
-
}
import Versions._
@@ -226,12 +225,12 @@ object Dependencies {
val http4sCirce = "org.http4s" %% "http4s-circe" % http4sVersion
val http4s: Seq[ModuleID] = Seq(http4sDsl, http4sJdkClient, http4sCirce)
- val bouncyProv = "org.bouncycastle" % "bcprov-jdk15on" % bouncyCastleVersion
- val bouncyUtil = "org.bouncycastle" % "bcutil-jdk15on" % bouncyCastleVersion
- val bouncyPkix = "org.bouncycastle" % "bcpkix-jdk15on" % bouncyCastleVersion
- val bouncyBcpg = "org.bouncycastle" % "bcpg-jdk15on" % bouncyCastleVersion
- val bouncyTls = "org.bouncycastle" % "bctls-jdk15on" % bouncyCastleVersion
- val bouncyCastle: Seq[ModuleID] = Seq(bouncyProv, bouncyUtil, bouncyPkix, bouncyBcpg, bouncyTls)
+ val bouncyProv = "org.bouncycastle" % "bcprov-jdk15on" % bouncyCastleVersion
+ val bouncyUtil = "org.bouncycastle" % "bcutil-jdk15on" % bouncyCastleVersion
+ val bouncyPkix = "org.bouncycastle" % "bcpkix-jdk15on" % bouncyCastleVersion
+ val bouncyBcpg = "org.bouncycastle" % "bcpg-jdk15on" % bouncyCastleVersion
+ val bouncyTls = "org.bouncycastle" % "bctls-jdk15on" % bouncyCastleVersion
+ val bouncyCastle = Seq(bouncyProv, bouncyUtil, bouncyPkix, bouncyBcpg, bouncyTls)
lazy val avro = "org.apache.avro" % "avro" % avroVersion
lazy val avroProtobuf = "org.apache.avro" % "avro-protobuf" % avroVersion
@@ -278,9 +277,10 @@ object Dependencies {
lazy val calciteLinq4J = "org.apache.calcite" % "calcite-linq4j" % calciteVersion
- lazy val s3Sdk = "software.amazon.awssdk" % "s3" % awsSdkVersion
- lazy val stsSdk = "software.amazon.awssdk" % "sts" % awsSdkVersion
- lazy val javaxBind = "javax.xml.bind" % "jaxb-api" % javaxBindVersion
+ lazy val s3Sdk = "software.amazon.awssdk" % "s3" % awsSdkVersion
+ lazy val stsSdk = "software.amazon.awssdk" % "sts" % awsSdkVersion
+ lazy val javaxBind = "javax.xml.bind" % "jaxb-api" % javaxBindVersion
+ lazy val awsOpenSearch = "software.amazon.awssdk" % "opensearch" % awsSdkVersion
lazy val azureDataLakeSdk: ModuleID = "com.azure" % "azure-storage-file-datalake" % azureDataLakeVersion
lazy val azureIdentity: ModuleID = "com.azure" % "azure-identity" % azureIdentityVersion
@@ -387,15 +387,18 @@ object Dependencies {
lazy val festAssert = "org.easytesting" % "fest-assert" % "1.4"
def elastic4sCore(v: String): ModuleID = "com.sksamuel.elastic4s" %% "elastic4s-core" % v
+ def elastic4sCats(v: String): ModuleID = "com.sksamuel.elastic4s" %% "elastic4s-effect-cats-3" % v
def elastic4sClient(v: String): ModuleID = "com.sksamuel.elastic4s" %% "elastic4s-client-esjava" % v
def elastic4sTestKit(v: String): ModuleID = "com.sksamuel.elastic4s" %% "elastic4s-testkit" % v
def elastic4sHttp(v: String): ModuleID = "com.sksamuel.elastic4s" %% "elastic4s-http" % v
- def elasticSearch(v: String): ModuleID = "org.elasticsearch" % "elasticsearch" % v
- def elasticSearchAnalysis(v: String): ModuleID = "org.codelibs.elasticsearch.module" % "analysis-common" % v
+ def elasticSearch(v: String): ModuleID = "co.elastic.clients" % "elasticsearch-java" % v
+ def elasticSearchAnalysis(v: String): ModuleID = "org.codelibs.elasticsearch.module" % "analysis-common" % v
def jna(v: String): ModuleID = "net.java.dev.jna" % "jna" % v
+ val openSearchRest: ModuleID = "org.opensearch.client" % "opensearch-rest-client" % "2.9.0"
+ val openSearchJava: ModuleID = "org.opensearch.client" % "opensearch-java" % "2.6.0"
}
trait Dependencies {
@@ -542,15 +545,16 @@ trait Dependencies {
testContainersScalaElasticsearch,
)
- val kafkaConnectElastic6Deps: Seq[ModuleID] =
- elasticCommonDeps(Elastic6Versions) ++ Seq(elastic4sHttp(Elastic6Versions.elastic4sVersion))
+ val kafkaConnectElasticBaseDeps: Seq[ModuleID] =
+ Seq[ModuleID]()
- val kafkaConnectElastic6TestDeps: Seq[ModuleID] = baseTestDeps ++ elasticTestCommonDeps(Elastic6Versions)
+ val kafkaConnectElastic8Deps: Seq[ModuleID] =
+ kafkaConnectElasticBaseDeps ++ Seq(elastic4sClient(Elastic8Versions.elastic4sVersion))
- val kafkaConnectElastic7Deps: Seq[ModuleID] =
- elasticCommonDeps(Elastic7Versions) ++ Seq(elastic4sClient(Elastic7Versions.elastic4sVersion))
+ val kafkaConnectOpenSearchDeps: Seq[ModuleID] =
+ kafkaConnectElasticBaseDeps ++ Seq(openSearchRest, openSearchJava, awsOpenSearch, stsSdk)
- val kafkaConnectElastic7TestDeps: Seq[ModuleID] = baseTestDeps ++ elasticTestCommonDeps(Elastic7Versions)
+ val kafkaConnectElastic8TestDeps: Seq[ModuleID] = baseTestDeps ++ elasticTestCommonDeps(Elastic8Versions)
val kafkaConnectFtpDeps: Seq[ModuleID] = Seq(commonsNet, commonsCodec, commonsIO, commonsLang3, jsch)
@@ -578,7 +582,7 @@ trait Dependencies {
testcontainersMongodb,
jedis,
mongoDb,
- )
+ ) ++ bouncyCastle
val nettyOverrides: Seq[ModuleID] = Seq(
nettyCommon,
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/ElasticsearchContainer.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/ElasticsearchContainer.scala
index 3a49ee9e3..9c47fb7c6 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/ElasticsearchContainer.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/ElasticsearchContainer.scala
@@ -15,38 +15,90 @@
*/
package io.lenses.streamreactor.connect.testcontainers
-import io.lenses.streamreactor.connect.testcontainers.ElasticsearchContainer.defaultNetworkAlias
-import io.lenses.streamreactor.connect.testcontainers.ElasticsearchContainer.defaultTag
+import cats.implicits.catsSyntaxOptionId
+import cats.implicits.none
+import org.scalatest.Assertions.fail
import org.testcontainers.elasticsearch.{ ElasticsearchContainer => JavaElasticsearchContainer }
import org.testcontainers.utility.DockerImageName
+case class ElasticContainerSetup(
+ key: String,
+ imageUrl: String,
+ imageVersion: String,
+ compatibleSubstituteFor: Option[String],
+ envs: Seq[(String, String)],
+)
+object ElasticsearchContainer {
+
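+  // available container flavours, keyed by name: Elasticsearch 8 and OpenSearch 2, each with a default and an SSL-oriented variant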
+ private val setup: Map[String, ElasticContainerSetup] =
+ Seq(
+ ElasticContainerSetup("elastic8",
+ "docker.elastic.co/elasticsearch/elasticsearch",
+ "8.10.1",
+ none,
+ Seq("xpack.security.enabled" -> "false"),
+ ),
+ ElasticContainerSetup("elastic8-ssl", "docker.elastic.co/elasticsearch/elasticsearch", "8.10.1", none, Seq.empty),
+ ElasticContainerSetup(
+ "open",
+ "opensearchproject/opensearch",
+ "2.10.0",
+ "docker.elastic.co/elasticsearch/elasticsearch".some,
+ Seq("plugins.security.disabled" -> "true"),
+ ),
+ ElasticContainerSetup(
+ "open-ssl",
+ "opensearchproject/opensearch",
+ "2.10.0",
+ "docker.elastic.co/elasticsearch/elasticsearch".some,
+ Seq(
+ "plugins.security.ssl.http.enabled" -> "true",
+ "plugins.security.ssl.http.keystore_type" -> "jks",
+ "plugins.security.ssl.http.keystore_filepath" -> "security/keystore.jks",
+ "plugins.security.ssl.http.keystore_password" -> "changeIt",
+ "plugins.security.ssl.http.truststore_type" -> "jks",
+ "plugins.security.ssl.http.truststore_filepath" -> "security/truststore.jks",
+ "plugins.security.ssl.http.truststore_password" -> "changeIt",
+
+// "plugins.security.ssl.transport.keystore_type" -> "jks",
+// "plugins.security.ssl.transport.keystore_filepath" -> "security/keystore.jks",
+// "plugins.security.ssl.transport.keystore_password" -> "changeIt",
+// "plugins.security.ssl.transport.truststore_type" -> "jks",
+// "plugins.security.ssl.transport.truststore_filepath" -> "security/truststore.jks",
+// "plugins.security.ssl.transport.truststore_password" -> "changeIt",
+ ),
+ ),
+ ).map(ec => ec.key -> ec).toMap
+ def apply(containerKey: String): ElasticsearchContainer = {
+    val containerSetup = setup.getOrElse(containerKey, fail(s"No container setup found for key: $containerKey"))
+    new ElasticsearchContainer(containerSetup)
+ }
+
+}
class ElasticsearchContainer(
- dockerImage: DockerImageName,
- dockerTag: String = defaultTag,
- val networkAlias: String = defaultNetworkAlias,
+ val setup: ElasticContainerSetup,
) extends SingleContainer[JavaElasticsearchContainer] {
val port: Int = 9200
- override val container: JavaElasticsearchContainer =
- new JavaElasticsearchContainer(dockerImage.withTag(dockerTag))
- container.withNetworkAliases(networkAlias)
+ override val container: JavaElasticsearchContainer = {
+ val image = DockerImageName
+ .parse(setup.imageUrl)
+ .withTag(setup.imageVersion)
+ val imageWithSub = setup.compatibleSubstituteFor.fold(image)(
+ image.asCompatibleSubstituteFor,
+ )
+ new JavaElasticsearchContainer(imageWithSub)
+ }
+
+ container.withNetworkAliases(setup.key)
+
+ setup.envs.foreach { case (k, v) => container.withEnv(k, v) }
lazy val hostNetwork = new HostNetwork()
class HostNetwork {
def httpHostAddress: String = container.getHttpHostAddress
}
-}
-object ElasticsearchContainer {
- private val dockerImage = DockerImageName.parse("docker.elastic.co/elasticsearch/elasticsearch")
- private val defaultTag = "6.8.8"
- private val defaultNetworkAlias = "elastic"
-
- def apply(
- networkAlias: String = defaultNetworkAlias,
- dockerTag: String = defaultTag,
- ): ElasticsearchContainer =
- new ElasticsearchContainer(dockerImage, dockerTag, networkAlias)
}
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaConnectContainer.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaConnectContainer.scala
index 619df81a5..5c37239db 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaConnectContainer.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaConnectContainer.scala
@@ -19,6 +19,7 @@ import com.github.dockerjava.api.model.Ulimit
import io.lenses.streamreactor.connect.testcontainers.KafkaVersions.ConfluentVersion
import io.lenses.streamreactor.connect.testcontainers.KafkaConnectContainer.defaultNetworkAlias
import io.lenses.streamreactor.connect.testcontainers.KafkaConnectContainer.defaultRestPort
+import org.testcontainers.containers.BindMode
import org.testcontainers.containers.GenericContainer
import org.testcontainers.containers.KafkaContainer
import org.testcontainers.containers.wait.strategy.Wait
@@ -91,6 +92,13 @@ class KafkaConnectContainer(
def installPackage(pkg: String): ExecResult =
rootExecInContainer(container = this, commands = Seq(s"microdnf", "install", pkg))
+
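+  // binds each (host path, container path) pair into the container read-write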
+ def copyBinds(binds: Seq[(String, String)]): Unit =
+ binds.foreach {
+ case (k, v) =>
+ addFileSystemBind(k, v, BindMode.READ_WRITE)
+ }
+
}
object KafkaConnectContainer {
private val dockerImage = DockerImageName.parse("confluentinc/cp-kafka-connect")
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaVersions.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaVersions.scala
index 9b89dde27..9ed2a2093 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaVersions.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaVersions.scala
@@ -19,7 +19,7 @@ import com.typesafe.scalalogging.LazyLogging
object KafkaVersions extends LazyLogging {
- private val FallbackConfluentVersion = "7.3.1"
+ private val FallbackConfluentVersion = "7.5.0"
val ConfluentVersion: String = {
val (vers, from) = sys.env.get("CONFLUENT_VERSION") match {
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/ConnectorConfiguration.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/ConnectorConfiguration.scala
index aa68e7574..4e6dd446c 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/ConnectorConfiguration.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/ConnectorConfiguration.scala
@@ -25,7 +25,7 @@ case class ConnectorConfiguration(
implicit val formats: DefaultFormats.type = DefaultFormats
- def toJson(): String = {
+ def toJson: String = {
val mergedConfigMap = config + ("tasks.max" -> ConfigValue(1))
Serialization.write(
Map[String, Any](
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/KafkaConnectClient.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/KafkaConnectClient.scala
index 60c719049..616f8a200 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/KafkaConnectClient.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/KafkaConnectClient.scala
@@ -50,7 +50,7 @@ class KafkaConnectClient(kafkaConnectContainer: KafkaConnectContainer) extends S
timeoutSeconds: Long = 10L,
): Unit = {
val httpPost = HttpRequest.newBuilder()
- .POST(HttpRequest.BodyPublishers.ofString(connector.toJson()))
+ .POST(HttpRequest.BodyPublishers.ofString(connector.toJson))
.uri(URI.create(s"${kafkaConnectContainer.hostNetwork.restEndpointUrl}/connectors"))
.header("Accept", "application/json")
.header("Content-Type", "application/json")
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/KeyStoreUtils.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/KeyStoreUtils.scala
new file mode 100644
index 000000000..3e897342a
--- /dev/null
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/KeyStoreUtils.scala
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.testcontainers.scalatest
+
+import com.typesafe.scalalogging.LazyLogging
+import org.bouncycastle.asn1.x500.X500Name
+import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo
+import org.bouncycastle.cert.X509v3CertificateBuilder
+import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter
+import org.bouncycastle.jce.provider.BouncyCastleProvider
+import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder
+
+import java.io.FileOutputStream
+import java.math.BigInteger
+import java.nio.file.Files
+import java.nio.file.Path
+import java.security.cert.X509Certificate
+import java.security.interfaces.RSAPrivateKey
+import java.security.KeyPairGenerator
+import java.security.KeyStore
+import java.security.Security
+import java.util.Date
+
+object KeyStoreUtils extends LazyLogging {
+ Security.addProvider(new BouncyCastleProvider())
+
+ def createKeystore(commonName: String): Path = {
+
+ val tmpDir: Path = Files.createTempDirectory("security")
+
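+    // generates a 2048-bit self-signed RSA certificate (valid for 365 days) and writes matching JKS keystore and truststore files, both protected with the password "changeIt"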
+ val (certificate, privateKey) = KeyStoreUtils.generateSelfSignedCertificate(2048, 365, commonName)
+ val _ = KeyStoreUtils.createAndSaveKeystore(tmpDir, "changeIt", certificate, privateKey)
+ val _ = KeyStoreUtils.createAndSaveTruststore(tmpDir, "changeIt", certificate)
+ logger.info(s"container -> Creating keystore at $tmpDir")
+ tmpDir
+ }
+
+ def generateSelfSignedCertificate(
+ keySize: Int,
+ certificateValidityDays: Int,
+ commonName: String,
+ ): (X509Certificate, RSAPrivateKey) = {
+ val keyPairGen = KeyPairGenerator.getInstance("RSA", "BC")
+ keyPairGen.initialize(keySize)
+ val keyPair = keyPairGen.generateKeyPair()
+
+ val notBefore = new Date()
+ val notAfter = new Date(System.currentTimeMillis() + certificateValidityDays * 24L * 60 * 60 * 1000)
+
+ val publicKeyInfo = SubjectPublicKeyInfo.getInstance(keyPair.getPublic.getEncoded)
+
+ val certBuilder = new X509v3CertificateBuilder(
+ new X500Name(s"CN=$commonName"),
+ BigInteger.valueOf(System.currentTimeMillis()),
+ notBefore,
+ notAfter,
+ new X500Name(s"CN=$commonName"),
+ publicKeyInfo,
+ )
+
+ val contentSigner =
+ new JcaContentSignerBuilder("SHA256WithRSAEncryption").setProvider("BC").build(keyPair.getPrivate)
+ val certHolder = certBuilder.build(contentSigner)
+ val cert = new JcaX509CertificateConverter().setProvider("BC").getCertificate(certHolder)
+
+ (cert, keyPair.getPrivate.asInstanceOf[RSAPrivateKey])
+ }
+
+ def createAndSaveKeystore(
+ tmpDir: Path,
+ password: String,
+ certificate: X509Certificate,
+ privateKey: RSAPrivateKey,
+ ): String = {
+
+ val keyStore = KeyStore.getInstance("JKS")
+ keyStore.load(null, password.toCharArray)
+
+ // Store the private key and certificate in the keystore
+ keyStore.setKeyEntry("alias", privateKey, password.toCharArray, Array(certificate))
+
+ val keyStorePath = tmpDir.resolve("keystore.jks").toString
+ // Save the keystore to a file
+ val keystoreOutputStream = new FileOutputStream(keyStorePath)
+ keyStore.store(keystoreOutputStream, password.toCharArray)
+ keystoreOutputStream.close()
+
+ keyStorePath
+ }
+
+ def createAndSaveTruststore(tmpDir: Path, password: String, certificate: X509Certificate): String = {
+
+ val trustStore = KeyStore.getInstance("JKS")
+ trustStore.load(null, password.toCharArray)
+
+ // Add the trusted certificate to the truststore
+ trustStore.setCertificateEntry("alias", certificate)
+ val trustStorePath = tmpDir.resolve("truststore.jks").toString
+
+ // Save the truststore to a file
+ val truststoreOutputStream = new FileOutputStream(trustStorePath)
+ trustStore.store(truststoreOutputStream, password.toCharArray)
+ truststoreOutputStream.close()
+
+ trustStorePath
+ }
+
+}
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/StreamReactorContainerPerSuite.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/StreamReactorContainerPerSuite.scala
index e145f6a06..d3c71bca5 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/StreamReactorContainerPerSuite.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/StreamReactorContainerPerSuite.scala
@@ -36,11 +36,14 @@ import org.scalatest.time.Span
import org.scalatest.AsyncTestSuite
import org.scalatest.BeforeAndAfterAll
import org.testcontainers.containers.output.Slf4jLogConsumer
+import org.testcontainers.containers.BindMode
+import org.testcontainers.containers.GenericContainer
import org.testcontainers.containers.KafkaContainer
import org.testcontainers.containers.Network
import org.testcontainers.utility.DockerImageName
import java.nio.file.Files
+import java.nio.file.Path
import java.nio.file.Paths
import java.time.Duration
import java.util.Properties
@@ -56,8 +59,14 @@ trait StreamReactorContainerPerSuite extends BeforeAndAfterAll with Eventually w
override implicit def patienceConfig: PatienceConfig = PatienceConfig(timeout = Span(1, Minute))
+ val commonName: Option[String] = None
+
val network: Network = Network.SHARED
+ def useKeyStore: Boolean = false
+
+  def keyStorePath: Option[Path] = Option.when(useKeyStore)(KeyStoreUtils.createKeystore(commonName.getOrElse("")))
+
def connectorModule: String
def providedJars(): Seq[String] = Seq()
@@ -68,14 +77,32 @@ trait StreamReactorContainerPerSuite extends BeforeAndAfterAll with Eventually w
.withLogConsumer(new Slf4jLogConsumer(logger.underlying))
lazy val kafkaConnectContainer: KafkaConnectContainer = {
- KafkaConnectContainer(
+ val c = KafkaConnectContainer(
kafkaContainer = kafkaContainer,
schemaRegistryContainer = schemaRegistryContainer,
connectPluginPath = Some(connectPluginPath()),
providedJars = providedJars(),
- ).withNetwork(network).withLogConsumer(new Slf4jLogConsumer(logger.underlying))
+ )
+ .withNetwork(network)
+ .withLogConsumer(new Slf4jLogConsumer(logger.underlying))
+
+ copyBinds(c, "/security")
+ c
}
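+  // binds the generated keystore.jks and truststore.jks (when useKeyStore is enabled) into the container under the given path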
+ protected def copyBinds(container: GenericContainer[_], path: String): Unit =
+ keyStorePath.foreach {
+ ksp =>
+ container.addFileSystemBind(ksp.resolve("keystore.jks").toAbsolutePath.toString,
+ s"$path/keystore.jks",
+ BindMode.READ_WRITE,
+ )
+ container.addFileSystemBind(ksp.resolve("truststore.jks").toAbsolutePath.toString,
+ s"$path/truststore.jks",
+ BindMode.READ_WRITE,
+ )
+ }
+
// Override for different SchemaRegistryContainer configs
val schemaRegistryContainer: Option[SchemaRegistryContainer] =
SchemaRegistryContainer(kafkaContainer = kafkaContainer)