diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 00000000..1a425085
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,2 @@
+# Scala Steward: Reformat with scalafmt 3.8.6
+c5e0595371a88f64dd1f32c9f226cea46132ccc7
diff --git a/.scalafmt.conf b/.scalafmt.conf
index df50c7b6..a6bafde6 100644
--- a/.scalafmt.conf
+++ b/.scalafmt.conf
@@ -1,6 +1,6 @@
 preset = defaultWithAlign
 //@formatter on
-version = 3.7.17
+version = 3.8.6
 runner.dialect = scala3
 align.preset = more
 danglingParentheses.preset = true
diff --git a/build.sbt b/build.sbt
index 5bea79ff..917d1711 100644
--- a/build.sbt
+++ b/build.sbt
@@ -6,13 +6,15 @@ sonatypeProfileName := "com.crobox"
 lazy val root = (project in file("."))
   .settings(
-    publish := {},
+    publish         := {},
     publishArtifact := false,
     inThisBuild(
       List(
         organization := "com.crobox.clickhouse",
-        homepage := Some(url("https://github.com/crobox/clickhouse-scala-client")),
-        licenses := List("The GNU Lesser General Public License, Version 3.0" -> url("http://www.gnu.org/licenses/lgpl-3.0.txt")),
+        homepage     := Some(url("https://github.com/crobox/clickhouse-scala-client")),
+        licenses := List(
+          "The GNU Lesser General Public License, Version 3.0" -> url("http://www.gnu.org/licenses/lgpl-3.0.txt")
+        ),
         developers := List(
           Developer(
             "crobox",
@@ -21,10 +23,10 @@ lazy val root = (project in file("."))
             url("https://crobox.com")
           )
         ),
-        scalaVersion := "2.13.16",
+        scalaVersion       := "2.13.16",
         crossScalaVersions := List("2.13.16", "3.3.1"),
         javacOptions ++= Seq("-g", "-Xlint:unchecked", "-Xlint:deprecation", "-source", "11", "-target", "11"),
-        scalacOptions ++= Seq("-unchecked", "-deprecation", "-feature", "-language:_", "-encoding", "UTF-8"),
+        scalacOptions ++= Seq("-unchecked", "-deprecation", "-feature", "-language:_", "-encoding", "UTF-8")
       )
     ),
     name := "clickhouse"
@@ -35,7 +37,7 @@ lazy val client: Project = (project in file("client"))
   .configs(Config.CustomIntegrationTest)
   .settings(Config.testSettings: _*)
   .settings(
-    name := "client",
+    name                                                              := "client",
     sbtrelease.ReleasePlugin.autoImport.releasePublishArtifactsAction := PgpKeys.publishSigned.value,
     libraryDependencies ++= Seq(
       "io.spray" %% "spray-json" % "1.3.6",
@@ -43,8 +45,8 @@ lazy val client: Project = (project in file("client"))
       "org.apache.pekko" %% "pekko-stream" % PekkoVersion,
       "org.apache.pekko" %% "pekko-http" % PekkoHttpVersion,
       "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5",
-      "joda-time" % "joda-time" % "2.13.0"
-    ) ++ Seq("org.apache.pekko" %% "pekko-testkit" % PekkoVersion % Test) ++ Build.testDependencies.map(_ % Test)
+      "joda-time"                   % "joda-time"     % "2.13.0"
+    ) ++ Seq("org.apache.pekko" %% "pekko-testkit" % PekkoVersion % Test) ++ Build.testDependencies.map(_ % Test)
   )

 lazy val dsl = (project in file("dsl"))
@@ -52,11 +54,11 @@ lazy val dsl = (project in file("dsl"))
   .dependsOn(client)
   .configs(Config.CustomIntegrationTest)
   .settings(Config.testSettings: _*)
   .settings(
-    name := "dsl",
+    name                                                              := "dsl",
     sbtrelease.ReleasePlugin.autoImport.releasePublishArtifactsAction := PgpKeys.publishSigned.value,
     libraryDependencies ++= Seq(
-      "com.google.guava" % "guava" % "33.4.0-jre",
-      "com.typesafe" % "config" % "1.4.3"
+      "com.google.guava" % "guava"  % "33.4.0-jre",
+      "com.typesafe"     % "config" % "1.4.3"
     )
   )
 // .settings(excludeDependencies ++= Seq(ExclusionRule("org.apache.pekko")))
@@ -64,7 +66,7 @@ lazy val dsl = (project in file("dsl"))
 lazy val testkit = (project in file("testkit"))
   .dependsOn(client)
   .settings(
-    name := "testkit",
+    name                                                              := "testkit",
     sbtrelease.ReleasePlugin.autoImport.releasePublishArtifactsAction := PgpKeys.publishSigned.value,
     libraryDependencies ++= Build.testDependencies
   )
diff --git a/client/src/main/scala/com/crobox/clickhouse/ClickhouseClient.scala b/client/src/main/scala/com/crobox/clickhouse/ClickhouseClient.scala
index 26b72608..89632534 100644
--- a/client/src/main/scala/com/crobox/clickhouse/ClickhouseClient.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/ClickhouseClient.scala
@@ -18,12 +18,14 @@ import scala.concurrent.{Await, ExecutionContext, Future}
 /**
  * Async clickhouse client using Pekko Http and Streams
  *
- * @author Sjoerd Mulder
+ * @author
+ *   Sjoerd Mulder
  * @since 31-03-17
  */
-class ClickhouseClient(configuration: Option[Config] = None,
-                       override val customConnectionContext: Option[HttpsConnectionContext] = None)
-    extends ClickHouseExecutor
+class ClickhouseClient(
+    configuration: Option[Config] = None,
+    override val customConnectionContext: Option[HttpsConnectionContext] = None
+) extends ClickHouseExecutor
     with ClickhouseResponseParser
     with ClickhouseQueryBuilder {

@@ -42,21 +44,24 @@ class ClickhouseClient(configuration: Option[Config] = None,
   /**
    * Execute a read-only query on Clickhouse
    *
-   * @param sql a valid Clickhouse SQL string
-   * @return Future with the result that clickhouse returns
+   * @param sql
+   *   a valid Clickhouse SQL string
+   * @return
+   *   Future with the result that clickhouse returns
    */
   def query(sql: String)(implicit settings: QuerySettings = QuerySettings(ReadQueries)): Future[String] =
     executeRequest(sql, settings.copy(readOnly = ReadQueries, idempotent = settings.idempotent.orElse(Some(true))))

   /**
-   * Execute a read-only query on Clickhouse
-   * Experimental api, may change in the future.
+   * Execute a read-only query on Clickhouse Experimental api, may change in the future.
    *
-   * @param sql a valid Clickhouse SQL string
-   * @return stream with the query progress (started/rejected/finished/failed) which materializes with the query result
+   * @param sql
+   *   a valid Clickhouse SQL string
+   * @return
+   *   stream with the query progress (started/rejected/finished/failed) which materializes with the query result
    */
-  def queryWithProgress(sql: String)(
-      implicit settings: QuerySettings = QuerySettings(ReadQueries)
+  def queryWithProgress(sql: String)(implicit
+      settings: QuerySettings = QuerySettings(ReadQueries)
   ): Source[QueryProgress, Future[String]] =
     executeRequestWithProgress(
       sql,
@@ -64,11 +69,13 @@ class ClickhouseClient(configuration: Option[Config] = None,
     )

   /**
-   * Execute a query that is modifying the state of the database. e.g. INSERT, SET, CREATE TABLE.
-   * For security purposes SELECT and SHOW queries are not allowed, use the .query() method for those.
+   * Execute a query that is modifying the state of the database. e.g. INSERT, SET, CREATE TABLE. For security purposes
+   * SELECT and SHOW queries are not allowed, use the .query() method for those.
    *
-   * @param sql a valid Clickhouse SQL string
-   * @return Future with the result that clickhouse returns
+   * @param sql
+   *   a valid Clickhouse SQL string
+   * @return
+   *   Future with the result that clickhouse returns
    */
   def execute(sql: String)(implicit settings: QuerySettings = QuerySettings(AllQueries)): Future[String] =
     Future {
@@ -87,16 +94,17 @@ class ClickhouseClient(configuration: Option[Config] = None,
   /**
    * Creates a stream of the SQL query that will delimit the result from Clickhouse on new-line
    *
-   * @param sql a valid Clickhouse SQL string
+   * @param sql
+   *   a valid Clickhouse SQL string
    */
   def source(sql: String)(implicit settings: QuerySettings = QuerySettings(ReadQueries)): Source[String, NotUsed] =
     sourceByteString(sql).via(Framing.delimiter(ByteString("\n"), MaximumFrameLength)).map(_.utf8String)

   /**
-   * Creates a stream of the SQL query that will emit every result as a ByteString
-   * It will not retry the queries.
+   * Creates a stream of the SQL query that will emit every result as a ByteString It will not retry the queries.
    *
-   * @param sql a valid Clickhouse SQL string
+   * @param sql
+   *   a valid Clickhouse SQL string
    */
   def sourceByteString(
       sql: String
@@ -108,22 +116,24 @@ class ClickhouseClient(configuration: Option[Config] = None,
       .flatMapConcat(response => response.entity.withoutSizeLimit().dataBytes)

   /**
-   * Accepts a source of Strings that it will stream to Clickhouse
-   * It will not retry the query as this will run the source once for every retry and might have
-   * unexpected consequences.
+   * Accepts a source of Strings that it will stream to Clickhouse It will not retry the query as this will run the
+   * source once for every retry and might have unexpected consequences.
    *
-   * @param sql a valid Clickhouse SQL INSERT statement
-   * @param source the Source with strings
-   * @return Future with the result that clickhouse returns
+   * @param sql
+   *   a valid Clickhouse SQL INSERT statement
+   * @param source
+   *   the Source with strings
+   * @return
+   *   Future with the result that clickhouse returns
    */
-  def sink(sql: String, source: Source[ByteString, Any])(
-      implicit settings: QuerySettings = QuerySettings(AllQueries)
+  def sink(sql: String, source: Source[ByteString, Any])(implicit
+      settings: QuerySettings = QuerySettings(AllQueries)
   ): Future[String] = {
     val entity = HttpEntity.apply(ContentTypes.`text/plain(UTF-8)`, source)
     executeRequestInternal(hostBalancer.nextHost, sql, queryIdentifier, settings, Option(entity), None)
   }

-  val serverVersion: ClickhouseServerVersion = {
+  val serverVersion: ClickhouseServerVersion =
     try {
       val path = "crobox.clickhouse.server.version"
       val cfg  = configuration.getOrElse(ConfigFactory.load())
@@ -132,12 +142,11 @@ class ClickhouseClient(configuration: Option[Config] = None,
       } else {
         Await.result(
           query("select version()")(QuerySettings(ReadQueries).copy(retries = Option(0)))
-            .recover {
-              case x: ClickhouseException =>
-                val key = "(version "
-                val idx = x.getMessage.indexOf(key)
-                if (idx > 0) x.getMessage.substring(idx + key.length, x.getMessage.indexOf(")", idx + key.length))
-                else "Unknown"
+            .recover { case x: ClickhouseException =>
+              val key = "(version "
+              val idx = x.getMessage.indexOf(key)
+              if (idx > 0) x.getMessage.substring(idx + key.length, x.getMessage.indexOf(")", idx + key.length))
+              else "Unknown"
             }
             .map(ClickhouseServerVersion(_)),
           5.seconds
@@ -151,5 +160,4 @@ class ClickhouseClient(configuration: Option[Config] = None,
         logger.error(s"Can't determine Clickhouse Server Version. Falling back to: $latest. Error: ${x.getMessage}", x)
         latest
     }
-  }
 }
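The rewrapped scaladoc above documents the whole public query surface of `ClickhouseClient`. A minimal usage sketch of exactly those methods — the `demo` table and SQL strings are hypothetical, and passing `None` makes the client read `crobox.clickhouse.client.*` from the default loaded config:

```scala
import org.apache.pekko.stream.scaladsl.Source
import org.apache.pekko.util.ByteString
import com.crobox.clickhouse.ClickhouseClient
import scala.concurrent.ExecutionContext.Implicits.global

val client = new ClickhouseClient(None) // falls back to ConfigFactory.load()

// Read-only query: retried, and eligible for idempotent GET execution
client.query("SELECT version()").foreach(println)

// State-changing statement: SELECT/SHOW are rejected here by design
client.execute("CREATE TABLE IF NOT EXISTS demo (i UInt32) ENGINE = Memory")

// Streaming insert: the source is run exactly once, so sink() never retries
client.sink("INSERT INTO demo FORMAT JSONEachRow", Source.single(ByteString("""{"i": 1}""")))
```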
diff --git a/client/src/main/scala/com/crobox/clickhouse/balancing/ClusterAwareHostBalancer.scala b/client/src/main/scala/com/crobox/clickhouse/balancing/ClusterAwareHostBalancer.scala
index bd9fdbac..efce488a 100644
--- a/client/src/main/scala/com/crobox/clickhouse/balancing/ClusterAwareHostBalancer.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/balancing/ClusterAwareHostBalancer.scala
@@ -13,15 +13,16 @@ import scala.concurrent.duration._
 import scala.concurrent.{ExecutionContext, Future}

 /**
- * Host balancer that does a round robin on all the entries found in the `system.clusters` table.
- * It assumes that the service itself can access directly the clickhouse nodes and that the default port `8123` is used
- * for every node.
- **/
-case class ClusterAwareHostBalancer(host: Uri,
-                                    cluster: String = "cluster",
-                                    manager: ActorRef,
-                                    scanningInterval: FiniteDuration)(
-    implicit system: ActorSystem,
+ * Host balancer that does a round robin on all the entries found in the `system.clusters` table. It assumes that the
+ * service itself can access directly the clickhouse nodes and that the default port `8123` is used for every node.
+ */
+case class ClusterAwareHostBalancer(
+    host: Uri,
+    cluster: String = "cluster",
+    manager: ActorRef,
+    scanningInterval: FiniteDuration
+)(implicit
+    system: ActorSystem,
     connectionRetrievalTimeout: Timeout,
     ec: ExecutionContext
 ) extends HostBalancer {
@@ -29,14 +30,14 @@ case class ClusterAwareHostBalancer(host: Uri,
   ClusterConnectionFlow
     .clusterConnectionsFlow(Future.successful(host), scanningInterval, cluster)
     .withAttributes(
-      ActorAttributes.supervisionStrategy({
+      ActorAttributes.supervisionStrategy {
         case ex: IllegalArgumentException =>
           logger.error("Failed resolving hosts for cluster, stopping the flow.", ex)
           Supervision.stop
         case ex =>
           logger.error("Failed resolving hosts for cluster, resuming.", ex)
           Supervision.Resume
-      })
+      }
     )
     .runWith(Sink.actorRef(manager, LogDeadConnections, throwable => logger.error(throwable.getMessage, throwable)))
diff --git a/client/src/main/scala/com/crobox/clickhouse/balancing/HostBalancer.scala b/client/src/main/scala/com/crobox/clickhouse/balancing/HostBalancer.scala
index bc57d921..f2c962b5 100644
--- a/client/src/main/scala/com/crobox/clickhouse/balancing/HostBalancer.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/balancing/HostBalancer.scala
@@ -30,12 +30,14 @@ object HostBalancer extends ClickhouseHostBuilder {
       case SingleHost => SingleHostBalancer(connectionHostFromConfig)
       case BalancingHosts =>
         val manager = system.actorOf(ConnectionManagerActor.props(ClickhouseHostHealth.healthFlow(_)))
-        MultiHostBalancer(connectionConfig
-                            .getConfigList("hosts")
-                            .asScala
-                            .toSet
-                            .map((config: Config) => extractHost(config)),
-                          manager)
+        MultiHostBalancer(
+          connectionConfig
+            .getConfigList("hosts")
+            .asScala
+            .toSet
+            .map((config: Config) => extractHost(config)),
+          manager
+        )
       case ClusterAware =>
         val manager = system.actorOf(ConnectionManagerActor.props(ClickhouseHostHealth.healthFlow(_)))
         ClusterAwareHostBalancer(
@@ -48,6 +50,8 @@ object HostBalancer extends ClickhouseHostBuilder {
   }

   def extractHost(connectionConfig: Config): Uri =
-    toHost(connectionConfig.getString("host"),
-           if (connectionConfig.hasPath("port")) Option(connectionConfig.getInt("port")) else None)
+    toHost(
+      connectionConfig.getString("host"),
+      if (connectionConfig.hasPath("port")) Option(connectionConfig.getInt("port")) else None
+    )
 }
diff --git a/client/src/main/scala/com/crobox/clickhouse/balancing/SingleHostBalancer.scala b/client/src/main/scala/com/crobox/clickhouse/balancing/SingleHostBalancer.scala
index 4e5b9dfe..66b39d03 100644
--- a/client/src/main/scala/com/crobox/clickhouse/balancing/SingleHostBalancer.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/balancing/SingleHostBalancer.scala
@@ -6,7 +6,7 @@ import scala.concurrent.Future

 /**
  * The default host balancer which always provides the same host.
- **/
+ */
 case class SingleHostBalancer(host: Uri) extends HostBalancer {

   override def nextHost: Future[Uri] = Future.successful(host)
diff --git a/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/ConnectionManagerActor.scala b/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/ConnectionManagerActor.scala
index facb02c5..79555e6c 100644
--- a/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/ConnectionManagerActor.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/ConnectionManagerActor.scala
@@ -12,16 +12,18 @@ import com.typesafe.config.Config
 import scala.collection.mutable
 import scala.concurrent.duration._

-class ConnectionManagerActor(healthSource: Uri => Source[ClickhouseHostStatus, Cancellable],
-                             optionalConfig: Option[Config])(
-    implicit materializer: Materializer
+class ConnectionManagerActor(
+    healthSource: Uri => Source[ClickhouseHostStatus, Cancellable],
+    optionalConfig: Option[Config]
+)(implicit
+    materializer: Materializer
 ) extends Actor
     with ActorLogging
     with Stash {

   import ConnectionManagerActor._

-  private val config = optionalConfig.getOrElse(context.system.settings.config).getConfig("connection")
+  private val config                  = optionalConfig.getOrElse(context.system.settings.config).getConfig("connection")
   private val fallbackToConfigurationHost = config.getBoolean("fallback-to-config-host-during-initialization")

   // state
@@ -38,7 +40,7 @@ class ConnectionManagerActor(healthSource: Uri => Source[ClickhouseHostStatus, C
   override def receive: Receive = {
     case Connections(hosts) =>
       hosts
-        .foreach(host => {
+        .foreach(host =>
           if (!currentConfiguredHosts.contains(host)) {
             log.info(s"Setting up host health checks for host $host")
             hostHealthScheduler.put(
@@ -50,7 +52,7 @@ class ConnectionManagerActor(healthSource: Uri => Source[ClickhouseHostStatus, C
                 .run()
             )
           }
-        })
+        )
       currentConfiguredHosts = hosts

     case GetConnection() =>
@@ -99,8 +101,8 @@ class ConnectionManagerActor(healthSource: Uri => Source[ClickhouseHostStatus, C
       }

     case LogDeadConnections =>
-      val deadHosts = hostsStatus.values.collect {
-        case Dead(host, _) => host
+      val deadHosts = hostsStatus.values.collect { case Dead(host, _) =>
+        host
       }
       if (deadHosts.nonEmpty)
         log.error(s"Hosts ${deadHosts.mkString(" - ")} are still unreachable")
diff --git a/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/cluster/ClusterConnectionFlow.scala b/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/cluster/ClusterConnectionFlow.scala
index 41e5da99..e6f7f0da 100644
--- a/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/cluster/ClusterConnectionFlow.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/cluster/ClusterConnectionFlow.scala
@@ -38,7 +38,7 @@ private[clickhouse] object ClusterConnectionFlow
     Source
       .tick(0.millis, scanningInterval, {})
       .mapAsync(1)(_ => targetHost)
-      .mapAsync(1)(host => {
+      .mapAsync(1) { host =>
         val query = s"SELECT host_address FROM system.clusters WHERE cluster='$cluster'"
         val request =
           toRequest(host, query, None, QuerySettings(readOnly = ReadQueries, idempotent = Some(true)), None)(
@@ -47,16 +47,16 @@ private[clickhouse] object ClusterConnectionFlow
         processClickhouseResponse(http.singleRequest(request, settings = settings), query, host, None)
           .map(splitResponse)
           .map(_.toSet.filter(_.nonEmpty))
-          .map(result => {
+          .map { result =>
             if (result.isEmpty) {
               throw new IllegalArgumentException(
                 s"Could not determine clickhouse cluster hosts for cluster $cluster and host $host. " +
-                  s"This could indicate that you are trying to use the cluster balancer to connect to a non cluster based clickhouse server. " +
-                  s"Please use the `SingleHostQueryBalancer` in that case."
+                s"This could indicate that you are trying to use the cluster balancer to connect to a non cluster based clickhouse server. " +
+                s"Please use the `SingleHostQueryBalancer` in that case."
               )
             }
             Connections(result.map(ClickhouseHostBuilder.toHost(_, Some(8123))))
-          })
-      })
+          }
+      }
   }
 }
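The balancer selection in `HostBalancer` above is driven purely by `connection.type`. A sketch of wiring the `balancing-hosts` mode programmatically, mirroring the `HostBalancerTest` fixture further down this diff (the host and port values are placeholders):

```scala
import com.typesafe.config.ConfigFactory
import com.crobox.clickhouse.ClickhouseClient

// connection.type picks the balancer; "balancing-hosts" round-robins over the
// static host list below, while "cluster-aware" discovers hosts by polling
// system.clusters on the configured host.
val balancingConfig = ConfigFactory
  .parseString("""
    |crobox.clickhouse.client.connection {
    |  type: "balancing-hosts"
    |  hosts: [
    |    { host: "localhost", port: 8123 }
    |  ]
    |}
  """.stripMargin)
  .withFallback(ConfigFactory.load())

val client = new ClickhouseClient(Some(balancingConfig))
```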
diff --git a/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/health/ClickhouseHostHealth.scala b/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/health/ClickhouseHostHealth.scala
index 33f39a17..c1382124 100644
--- a/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/health/ClickhouseHostHealth.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/balancing/discovery/health/ClickhouseHostHealth.scala
@@ -26,14 +26,15 @@ object ClickhouseHostHealth extends ClickhouseResponseParser {
   case class Dead(host: Uri, reason: Throwable) extends ClickhouseHostStatus { override val code: String = "nok" }

   /**
-   * Creates a source which emits the health status at most every `health-check.interval` interval.
-   * The source uses a cachedHostConnectionPool with a number of one maximum connections and one maximum open requests. This is configured on
-   * the provided actor system and assumes there is no other user of such a pool, so it will not be shared.
-   * This ensures the health checks will not affect the clients `superPool` in any way, and it will not fill the queue if one hosts hangs when returning the response.
-   * We also set the connection idle timeout to `health-check.timeout + health-check.interval` to ensure that the pool will be blocked with on hanging request.
-   * */
-  def healthFlow(host: Uri)(
-      implicit system: ActorSystem,
+   * Creates a source which emits the health status at most every `health-check.interval` interval. The source uses a
+   * cachedHostConnectionPool with a maximum of one connection and one open request. This is configured on the provided
+   * actor system and assumes there is no other user of such a pool, so it will not be shared. This ensures the health
+   * checks will not affect the client's `superPool` in any way, and it will not fill the queue if one host hangs when
+   * returning the response. We also set the connection idle timeout to `health-check.timeout + health-check.interval`
+   * to ensure that the pool will not stay blocked on one hanging request.
+   */
+  def healthFlow(host: Uri)(implicit
+      system: ActorSystem,
       executionContext: ExecutionContext
   ): Source[ClickhouseHostStatus, Cancellable] = {
     val healthCheckInterval: FiniteDuration =
@@ -74,12 +75,11 @@ object ClickhouseHostHealth extends ClickhouseResponseParser {
         Unmarshaller
           .stringUnmarshaller(decodeResponse(response).entity)
           .map(splitResponse)
-          .map(
-            stringResponse =>
-              if (stringResponse.equals(Seq("Ok."))) {
-                Alive(host)
-              } else {
-                Dead(host, new IllegalArgumentException(s"Got wrong result $stringResponse"))
+          .map(stringResponse =>
+            if (stringResponse.equals(Seq("Ok."))) {
+              Alive(host)
+            } else {
+              Dead(host, new IllegalArgumentException(s"Got wrong result $stringResponse"))
             }
           )
       case (Success(response), _) =>
diff --git a/client/src/main/scala/com/crobox/clickhouse/internal/ClickHouseExecutor.scala b/client/src/main/scala/com/crobox/clickhouse/internal/ClickHouseExecutor.scala
index 802d1f63..527df052 100644
--- a/client/src/main/scala/com/crobox/clickhouse/internal/ClickHouseExecutor.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/internal/ClickHouseExecutor.scala
@@ -27,7 +27,7 @@ private[clickhouse] trait ClickHouseExecutor extends LazyLogging {

   lazy val (progressQueue, progressSource) = {
     val builtSource = QueryProgress.queryProgressStream.run()
-    builtSource._2.runWith(Sink.ignore) //ensure we have one sink draining the progress
+    builtSource._2.runWith(Sink.ignore) // ensure we have one sink draining the progress
     builtSource
   }

@@ -37,7 +37,8 @@ private[clickhouse] trait ClickHouseExecutor extends LazyLogging {
   )
   private lazy val http              = Http()
   private lazy val connectionContext = customConnectionContext.getOrElse(http.defaultClientHttpsContext)
-  private lazy val pool = http.superPool[Promise[HttpResponse]](connectionContext = connectionContext, settings = superPoolSettings)
+  private lazy val pool =
+    http.superPool[Promise[HttpResponse]](connectionContext = connectionContext, settings = superPoolSettings)
   private lazy val bufferSize: Int   = config.getInt("buffer-size")
   private lazy val queryRetries: Int = config.getInt("retries")
@@ -50,24 +51,28 @@ private[clickhouse] trait ClickHouseExecutor extends LazyLogging {
     })(Keep.both)
     .run()

-  def executeRequest(query: String,
-                     settings: QuerySettings,
-                     entity: Option[RequestEntity] = None,
-                     progressQueue: Option[SourceQueueWithComplete[QueryProgress]] = None): Future[String] = {
+  def executeRequest(
+      query: String,
+      settings: QuerySettings,
+      entity: Option[RequestEntity] = None,
+      progressQueue: Option[SourceQueueWithComplete[QueryProgress]] = None
+  ): Future[String] = {
     val internalQueryIdentifier = queryIdentifier
     executeWithRetries(settings.retries.getOrElse(queryRetries), progressQueue, settings) { () =>
       executeRequestInternal(hostBalancer.nextHost, query, internalQueryIdentifier, settings, entity, progressQueue)
-    }.andThen {
-      case _ => progressQueue.foreach(_.complete())
+    }.andThen { case _ =>
+      progressQueue.foreach(_.complete())
     }
   }

   protected def queryIdentifier: String = Random.alphanumeric.take(20).mkString("")

-  def executeRequestWithProgress(query: String,
-                                 settings: QuerySettings,
-                                 entity: Option[RequestEntity] = None): Source[QueryProgress, Future[String]] =
+  def executeRequestWithProgress(
+      query: String,
+      settings: QuerySettings,
+      entity: Option[RequestEntity] = None
+  ): Source[QueryProgress, Future[String]] =
     Source
       .queue[QueryProgress](10, OverflowStrategy.dropHead)
       .mapMaterializedValue(queue => executeRequest(query, settings, entity, Some(queue)))
@@ -81,8 +86,8 @@ private[clickhouse] trait ClickHouseExecutor extends LazyLogging {
       .flatMap(_ => system.terminate())
   }

-  protected def singleRequest(request: HttpRequest, progressEnabled: Boolean): Future[HttpResponse] = {
-    if(progressEnabled) {
+  protected def singleRequest(request: HttpRequest, progressEnabled: Boolean): Future[HttpResponse] =
+    if (progressEnabled) {
       val promise = Promise[HttpResponse]()

       queue.offer(request -> promise).flatMap {
@@ -94,7 +99,6 @@ private[clickhouse] trait ClickHouseExecutor extends LazyLogging {
     } else {
       http.singleRequest(request, connectionContext = connectionContext)
     }
-  }

   protected def executeRequestInternal(
       host: Future[Uri],
@@ -104,30 +108,32 @@ private[clickhouse] trait ClickHouseExecutor extends LazyLogging {
       entity: Option[RequestEntity] = None,
       progressQueue: Option[SourceQueueWithComplete[QueryProgress]]
   ): Future[String] = {
-    progressQueue.foreach(definedProgressQueue => {
-      progressSource.runForeach(
-        progress => {
-          if (progress.identifier == queryIdentifier) {
-            definedProgressQueue.offer(progress.progress)
-          }
+    progressQueue.foreach(definedProgressQueue =>
+      progressSource.runForeach(progress =>
+        if (progress.identifier == queryIdentifier) {
+          definedProgressQueue.offer(progress.progress)
         }
       )
-    })
-    host.flatMap(actualHost => {
-      val request = toRequest(actualHost,
-                              query,
-                              Some(queryIdentifier),
-                              settings.copy(
-                                progressHeaders = settings.progressHeaders.orElse(Some(progressQueue.isDefined))
-                              ),
-                              entity)(config)
+    )
+    host.flatMap { actualHost =>
+      val request = toRequest(
+        actualHost,
+        query,
+        Some(queryIdentifier),
+        settings.copy(
+          progressHeaders = settings.progressHeaders.orElse(Some(progressQueue.isDefined))
+        ),
+        entity
+      )(config)
       processClickhouseResponse(singleRequest(request, progressQueue.isDefined), query, actualHost, progressQueue)
-    })
+    }
   }

-  private def executeWithRetries(retries: Int,
-                                 progressQueue: Option[SourceQueueWithComplete[QueryProgress]],
-                                 settings: QuerySettings)(
+  private def executeWithRetries(
+      retries: Int,
+      progressQueue: Option[SourceQueueWithComplete[QueryProgress]],
+      settings: QuerySettings
+  )(
       request: () => Future[String]
   ): Future[String] =
     request().recoverWith {
diff --git a/client/src/main/scala/com/crobox/clickhouse/internal/ClickhouseQueryBuilder.scala b/client/src/main/scala/com/crobox/clickhouse/internal/ClickhouseQueryBuilder.scala
index 21ed1131..51734d07 100644
--- a/client/src/main/scala/com/crobox/clickhouse/internal/ClickhouseQueryBuilder.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/internal/ClickhouseQueryBuilder.scala
@@ -1,7 +1,7 @@
 package com.crobox.clickhouse.internal

 import org.apache.pekko.http.scaladsl.model.Uri.Query
-import org.apache.pekko.http.scaladsl.model.headers.{HttpEncodingRange, RawHeader, `Content-Encoding`}
+import org.apache.pekko.http.scaladsl.model.headers.{`Content-Encoding`, HttpEncodingRange, RawHeader}
 import org.apache.pekko.http.scaladsl.model.{HttpMethods, HttpRequest, RequestEntity, Uri}
 import com.crobox.clickhouse.internal.QuerySettings.ReadQueries
 import com.crobox.clickhouse.internal.progress.ProgressHeadersAsEventsStage
@@ -20,11 +20,13 @@ private[clickhouse] trait ClickhouseQueryBuilder extends LazyLogging {
   }
   private val MaxUriSize = 16 * 1024

-  protected def toRequest(uri: Uri,
-                          query: String,
-                          queryIdentifier: Option[String],
-                          settings: QuerySettings,
-                          entity: Option[RequestEntity])(config: Config): HttpRequest = {
+  protected def toRequest(
+      uri: Uri,
+      query: String,
+      queryIdentifier: Option[String],
+      settings: QuerySettings,
+      entity: Option[RequestEntity]
+  )(config: Config): HttpRequest = {
     val urlQuery = uri.withQuery(Query(Query("query" -> query) ++ settings.withFallback(config).asQueryParams: _*))
     entity match {
       case Some(e) =>
@@ -41,8 +43,8 @@ private[clickhouse] trait ClickhouseQueryBuilder extends LazyLogging {
         )
       case None
           if settings.idempotent.contains(true)
-          && settings.readOnly == ReadQueries
-          && urlQuery.toString().getBytes.length < MaxUriSize => //max url size
+            && settings.readOnly == ReadQueries
+            && urlQuery.toString().getBytes.length < MaxUriSize => // max url size
         logger.debug(s"Executing clickhouse idempotent query [$query] on host [${uri.toString()}]")
         HttpRequest(
           method = HttpMethods.GET,
@@ -51,7 +53,7 @@ private[clickhouse] trait ClickhouseQueryBuilder extends LazyLogging {
             .query()
             .filterNot(
               _._1 == "readonly"
-            ) //get requests are readonly by default, if we send the readonly flag clickhouse will fail the request
+            ) // get requests are readonly by default, if we send the readonly flag clickhouse will fail the request
           ),
           headers = Headers ++ queryIdentifier.map(RawHeader(ProgressHeadersAsEventsStage.InternalQueryIdentifier, _))
         )
@@ -65,4 +67,4 @@ private[clickhouse] trait ClickhouseQueryBuilder extends LazyLogging {
       )
     }
   }
-}
\ No newline at end of file
+}
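The `case None` branch reformatted above is where `toRequest` decides between GET and POST. A sketch of the settings that trigger the idempotent GET path, grounded in that condition (read-only, idempotent, URL below `MaxUriSize`):

```scala
import com.crobox.clickhouse.internal.QuerySettings
import com.crobox.clickhouse.internal.QuerySettings.ReadQueries

// Read-only + idempotent queries whose URL stays below MaxUriSize (16 KiB) are
// sent as HTTP GET; the readonly flag is stripped because GET requests are
// readonly by default and ClickHouse would fail the request otherwise.
implicit val idempotentRead: QuerySettings =
  QuerySettings(readOnly = ReadQueries, idempotent = Some(true))

// With this implicit in scope, client.query("SELECT 1") goes out as a GET.
```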
diff --git a/client/src/main/scala/com/crobox/clickhouse/internal/ClickhouseResponseParser.scala b/client/src/main/scala/com/crobox/clickhouse/internal/ClickhouseResponseParser.scala
index 528f9722..6e0436d5 100644
--- a/client/src/main/scala/com/crobox/clickhouse/internal/ClickhouseResponseParser.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/internal/ClickhouseResponseParser.scala
@@ -14,11 +14,13 @@ import scala.util.{Failure, Success}

 private[clickhouse] trait ClickhouseResponseParser {

-  protected def processClickhouseResponse(responseFuture: Future[HttpResponse],
-                                          query: String,
-                                          host: Uri,
-                                          progressQueue: Option[SourceQueue[QueryProgress]])(
-      implicit materializer: Materializer,
+  protected def processClickhouseResponse(
+      responseFuture: Future[HttpResponse],
+      query: String,
+      host: Uri,
+      progressQueue: Option[SourceQueue[QueryProgress]]
+  )(implicit
+      materializer: Materializer,
       executionContext: ExecutionContext
   ): Future[String] =
     responseFuture.flatMap { response =>
@@ -26,33 +28,30 @@ private[clickhouse] trait ClickhouseResponseParser {
         case HttpResponse(StatusCodes.OK, _, entity, _) =>
           Unmarshaller
             .stringUnmarshaller(entity)
-            .map(content => {
-              if (content.contains("DB::Exception")) { //FIXME this is quite a fragile way to detect failures, hopefully nobody will have a valid exception string in the result. Check https://github.com/yandex/ClickHouse/issues/2999
-                throw ClickhouseException("Found exception in the query return body",
-                                          query,
-                                          ClickhouseChunkedException(content),
-                                          StatusCodes.OK)
+            .map { content =>
+              if (content.contains("DB::Exception")) { // FIXME this is quite a fragile way to detect failures, hopefully nobody will have a valid exception string in the result. Check https://github.com/yandex/ClickHouse/issues/2999
+                throw ClickhouseException(
+                  "Found exception in the query return body",
+                  query,
+                  ClickhouseChunkedException(content),
+                  StatusCodes.OK
+                )
               }
               content
-            })
+            }
             .andThen {
               case Success(_) =>
-                progressQueue.foreach(queue => {
-                  queue.offer(QueryFinished)
-                })
+                progressQueue.foreach(queue => queue.offer(QueryFinished))
               case Failure(exception) =>
-                progressQueue.foreach(queue => {
-                  queue.offer(QueryFailed(exception))
-                })
+                progressQueue.foreach(queue => queue.offer(QueryFailed(exception)))
             }
         case HttpResponse(code, _, entity, _) =>
           progressQueue.foreach(_.offer(QueryRejected))
           Unmarshaller
             .stringUnmarshaller(entity)
-            .flatMap(
-              response =>
-                Future.failed(
-                  ClickhouseException(s"Server [$host] returned code $code; $response", query, statusCode = code)
+            .flatMap(response =>
+              Future.failed(
+                ClickhouseException(s"Server [$host] returned code $code; $response", query, statusCode = code)
               )
             )
       }
diff --git a/client/src/main/scala/com/crobox/clickhouse/internal/QuerySettings.scala b/client/src/main/scala/com/crobox/clickhouse/internal/QuerySettings.scala
index 6b09f78b..ab40b208 100644
--- a/client/src/main/scala/com/crobox/clickhouse/internal/QuerySettings.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/internal/QuerySettings.scala
@@ -8,31 +8,29 @@ import org.apache.pekko.http.scaladsl.model.headers.HttpEncoding
 import scala.jdk.CollectionConverters._
 import scala.util.Try

-case class QuerySettings(readOnly: ReadOnlySetting = AllQueries,
-                         authentication: Option[(String, String)] = None,
-                         progressHeaders: Option[Boolean] = None,
-                         queryId: Option[String] = None,
-                         profile: Option[String] = None,
-                         httpCompression: Option[Boolean] = None,
-                         settings: Map[String, String] = Map.empty,
-                         idempotent: Option[Boolean] = None,
-                         retries: Option[Int] = None,
-                         requestCompressionType: Option[HttpEncoding] = None) {
+case class QuerySettings(
+    readOnly: ReadOnlySetting = AllQueries,
+    authentication: Option[(String, String)] = None,
+    progressHeaders: Option[Boolean] = None,
+    queryId: Option[String] = None,
+    profile: Option[String] = None,
+    httpCompression: Option[Boolean] = None,
+    settings: Map[String, String] = Map.empty,
+    idempotent: Option[Boolean] = None,
+    retries: Option[Int] = None,
+    requestCompressionType: Option[HttpEncoding] = None
+) {

   def asQueryParams: Query =
     Query(
       settings ++ (Seq("readonly" -> readOnly.value.toString) ++
-        queryId.map("query_id" -> _) ++
-        authentication.map(
-          auth => "user" -> auth._1
-        ) ++
-        authentication.map(auth => "password" -> auth._2) ++
-        profile.map("profile" -> _) ++
-        progressHeaders.map(
-          progress => "send_progress_in_http_headers" -> (if (progress) "1" else "0")
-        ) ++
-        httpCompression
-          .map(compression => "enable_http_compression" -> (if (compression) "1" else "0"))).toMap
+      queryId.map("query_id" -> _) ++
+      authentication.map(auth => "user" -> auth._1) ++
+      authentication.map(auth => "password" -> auth._2) ++
+      profile.map("profile" -> _) ++
+      progressHeaders.map(progress => "send_progress_in_http_headers" -> (if (progress) "1" else "0")) ++
+      httpCompression
+        .map(compression => "enable_http_compression" -> (if (compression) "1" else "0"))).toMap
     )

   def withFallback(config: Config): QuerySettings = {
@@ -42,10 +40,10 @@ case class QuerySettings(readOnly: ReadOnlySetting = AllQueries,
         val authConfig = config.getConfig(path("authentication"))
         (authConfig.getString("user"), authConfig.getString("password"))
       }.toOption),
-      profile = profile.orElse(Try { config.getString(path("profile")) }.toOption),
-      httpCompression = httpCompression.orElse(Try { config.getBoolean(path("http-compression")) }.toOption),
+      profile = profile.orElse(Try(config.getString(path("profile"))).toOption),
+      httpCompression = httpCompression.orElse(Try(config.getBoolean(path("http-compression"))).toOption),
       settings = custom.entrySet().asScala.map(u => (u.getKey, custom.getString(u.getKey))).toMap
-        ++ settings
+          ++ settings
     )
   }
diff --git a/client/src/main/scala/com/crobox/clickhouse/internal/progress/ClickhouseClientTransport.scala b/client/src/main/scala/com/crobox/clickhouse/internal/progress/ClickhouseClientTransport.scala
index a047664d..886a7a1f 100644
--- a/client/src/main/scala/com/crobox/clickhouse/internal/progress/ClickhouseClientTransport.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/internal/progress/ClickhouseClientTransport.scala
@@ -11,17 +11,20 @@ import org.apache.pekko.util.ByteString
 import scala.concurrent.Future

 /**
- * Clickhouse sends http progress headers with the name X-ClickHouse-Progress which cannot be handled in a streaming way in Pekko.
- * In the request we include our own custom header `X-Internal-Identifier` so we can send the internal query id with the progress
- * The progress headers are being intercepted by the transport and sent to an internal source as progress events with the internal query id which will be used to route them to the query progress source
- * We just proxy the request/response and do not manipulate them in any way
- * */
+ * Clickhouse sends http progress headers with the name X-ClickHouse-Progress which cannot be handled in a streaming way
+ * in Pekko. In the request we include our own custom header `X-Internal-Identifier` so we can send the internal query
+ * id with the progress The progress headers are being intercepted by the transport and sent to an internal source as
+ * progress events with the internal query id which will be used to route them to the query progress source We just
+ * proxy the request/response and do not manipulate them in any way
+ */
 class StreamingProgressClickhouseTransport(source: SourceQueue[String]) extends ClientTransport {
   override def connectTo(
       host: String,
       port: Int,
       settings: ClientConnectionSettings
-  )(implicit system: ActorSystem): Flow[
+  )(implicit
+      system: ActorSystem
+  ): Flow[
     ByteString,
     ByteString,
     Future[Http.OutgoingConnection]
@@ -60,10 +63,10 @@ class ProgressHeadersAsEventsStage(source: SourceQueue[String])
           if (queryIdHeader.isEmpty) {
             log.warning(s"Could not extract the query id from the containing $incomingString")
           }
-          queryId = queryIdHeader.map(header => {
+          queryId = queryIdHeader.map { header =>
             queryMarkedAsAccepted = false
             header.stripPrefix(InternalQueryIdentifier + ":").trim
-          })
+          }
         }
         push(serverOutput, byteString)
       }
@@ -92,31 +95,33 @@ class ProgressHeadersAsEventsStage(source: SourceQueue[String])
               progressHeaders
                 .filter(_.contains(ClickhouseProgressHeader))
                 .map(_.stripPrefix(ClickhouseProgressHeader + ":"))
-                .map(progressJson => {
-                  queryId.getOrElse("unknown") + "\n" + progressJson
-                })
-                .foreach(progress => {
-                  source.offer(progress)
-                })
+                .map(progressJson => queryId.getOrElse("unknown") + "\n" + progressJson)
+                .foreach(progress => source.offer(progress))
             }
           }
         }
       }
     )

-    setHandler(serverOutput, new OutHandler {
-      override def onPull(): Unit =
-        pull(clientInput)
-    })
-    setHandler(clientOutput, new OutHandler {
-      override def onPull(): Unit =
-        pull(serverInput)
-    })
+    setHandler(
+      serverOutput,
+      new OutHandler {
+        override def onPull(): Unit =
+          pull(clientInput)
+      }
+    )
+    setHandler(
+      clientOutput,
+      new OutHandler {
+        override def onPull(): Unit =
+          pull(serverInput)
+      }
+    )
   }
 }

 object ProgressHeadersAsEventsStage {
-  val InternalQueryIdentifier = "X-Internal-Identifier"
+  val InternalQueryIdentifier  = "X-Internal-Identifier"
   val ClickhouseProgressHeader = "X-ClickHouse-Progress"
   val AcceptedMark = "CLICKHOUSE_ACCEPTED"
   val Crlf = "\r\n"
diff --git a/client/src/main/scala/com/crobox/clickhouse/internal/progress/QueryProgress.scala b/client/src/main/scala/com/crobox/clickhouse/internal/progress/QueryProgress.scala
index 72bcd827..9958140b 100644
--- a/client/src/main/scala/com/crobox/clickhouse/internal/progress/QueryProgress.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/internal/progress/QueryProgress.scala
@@ -25,7 +25,7 @@ object QueryProgress extends LazyLogging {

   def queryProgressStream: RunnableGraph[(SourceQueueWithComplete[String], Source[ClickhouseQueryProgress, NotUsed])] =
     Source
      .queue[String](1000, OverflowStrategy.dropHead)
-      .map[Option[ClickhouseQueryProgress]](queryAndProgress => {
+      .map[Option[ClickhouseQueryProgress]](queryAndProgress =>
        queryAndProgress.split("\n", 2).toList match {
          case queryId :: ProgressHeadersAsEventsStage.AcceptedMark :: Nil =>
            Some(ClickhouseQueryProgress(queryId, QueryAccepted))
@@ -60,14 +60,13 @@ object QueryProgress extends LazyLogging {
             None
         }
-      })
-      .collect {
-        case Some(progress) => progress
+      )
+      .collect { case Some(progress) =>
+        progress
       }
-      .withAttributes(ActorAttributes.supervisionStrategy({
-        case ex @ _ =>
-          logger.warn("Detected failure in the query progress stream, resuming operation.", ex)
-          Supervision.Resume
-      }))
+      .withAttributes(ActorAttributes.supervisionStrategy { case ex @ _ =>
+        logger.warn("Detected failure in the query progress stream, resuming operation.", ex)
+        Supervision.Resume
+      })
       .toMat(BroadcastHub.sink)(Keep.both)
 }
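The progress plumbing reformatted above (`StreamingProgressClickhouseTransport`, `ProgressHeadersAsEventsStage`, `QueryProgress`) surfaces through `ClickhouseClient.queryWithProgress`, whose source materializes into the final query result. A sketch; the actor system here is only needed to run the stream:

```scala
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.stream.scaladsl.{Keep, Sink}
import com.crobox.clickhouse.ClickhouseClient

implicit val system: ActorSystem = ActorSystem("progress-example")
import system.dispatcher

val client = new ClickhouseClient(None)

// One run yields both the progress events (the stream elements) and the
// final query result (the materialized Future[String]).
val (result, progressDone) = client
  .queryWithProgress("select 1 + 2")
  .toMat(Sink.foreach(progress => println(s"progress: $progress")))(Keep.both)
  .run()

result.foreach(r => println(s"result: ${r.trim}")) // result: 3
```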
diff --git a/client/src/main/scala/com/crobox/clickhouse/stream/ClickhouseSink.scala b/client/src/main/scala/com/crobox/clickhouse/stream/ClickhouseSink.scala
index d9f86f55..80b046c6 100644
--- a/client/src/main/scala/com/crobox/clickhouse/stream/ClickhouseSink.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/stream/ClickhouseSink.scala
@@ -19,13 +19,14 @@ sealed trait TableOperation {

 case class Insert(table: String, jsonRow: String) extends TableOperation

-case class Optimize(table: String,
-                    localTable: Option[String] = None,
-                    cluster: Option[String] = None,
-                    partition: Option[String] = None,
-                    `final`: Boolean = true,
-                    deduplicate: Option[String] = None)
-    extends TableOperation {
+case class Optimize(
+    table: String,
+    localTable: Option[String] = None,
+    cluster: Option[String] = None,
+    partition: Option[String] = None,
+    `final`: Boolean = true,
+    deduplicate: Option[String] = None
+) extends TableOperation {

   def toSql: String = {
     var sql = s"OPTIMIZE TABLE ${localTable.getOrElse(table)}"
@@ -40,22 +41,21 @@ case class Optimize(
 object ClickhouseSink extends LazyLogging {

   @deprecated("use [[#toSink()]] instead")
-  def insertSink(config: Config, client: ClickhouseClient, indexerName: Option[String] = None)(
-      implicit ec: ExecutionContext,
+  def insertSink(config: Config, client: ClickhouseClient, indexerName: Option[String] = None)(implicit
+      ec: ExecutionContext,
       settings: QuerySettings = QuerySettings()
   ): Sink[Insert, Future[Done]] = toSink(config, client, indexerName)

-  def toSink(config: Config, client: ClickhouseClient, indexerName: Option[String] = None)(
-      implicit ec: ExecutionContext,
+  def toSink(config: Config, client: ClickhouseClient, indexerName: Option[String] = None)(implicit
+      ec: ExecutionContext,
       settings: QuerySettings = QuerySettings()
   ): Sink[TableOperation, Future[Done]] = {
     val indexerGeneralConfig = config.getConfig("crobox.clickhouse.indexer")
     val mergedIndexerConfig = indexerName
-      .flatMap(
-        theIndexName =>
-          if (indexerGeneralConfig.hasPath(theIndexName))
-            Some(indexerGeneralConfig.getConfig(theIndexName).withFallback(indexerGeneralConfig))
-          else None
+      .flatMap(theIndexName =>
+        if (indexerGeneralConfig.hasPath(theIndexName))
+          Some(indexerGeneralConfig.getConfig(theIndexName).withFallback(indexerGeneralConfig))
+        else None
       )
       .getOrElse(indexerGeneralConfig)
     val batchSize = mergedIndexerConfig.getInt("batch-size")
@@ -63,7 +63,7 @@ object ClickhouseSink extends LazyLogging {
     Flow[TableOperation]
       .groupBy(Int.MaxValue, _.table)
       .groupedWithin(batchSize, flushInterval)
-      .mapAsync(mergedIndexerConfig.getInt("concurrent-requests"))(operations => {
+      .mapAsync(mergedIndexerConfig.getInt("concurrent-requests")) { operations =>
         val table = operations.head.table
         logger.debug(
           s"Executing ${operations.size} operations on table: $table. Group Within: ($batchSize - $flushInterval)"
@@ -89,20 +89,20 @@ object ClickhouseSink extends LazyLogging {
             Future.successful("")
           }
         }
-      })
+      }
      .mergeSubstreams
      .toMat(Sink.ignore)(Keep.right)
   }

-  private def insertTable(client: ClickhouseClient, table: String, payload: Seq[String])(
-      implicit ec: ExecutionContext,
+  private def insertTable(client: ClickhouseClient, table: String, payload: Seq[String])(implicit
+      ec: ExecutionContext,
       settings: QuerySettings
   ): Future[String] = {
     logger.debug(s"Inserting ${payload.size} entries in table: $table.")
     client
       .execute(s"INSERT INTO $table FORMAT JSONEachRow", payload.mkString("\n"))
-      .recover {
-        case ex => throw ClickhouseIndexingException("failed to index", ex, payload, table)
+      .recover { case ex =>
+        throw ClickhouseIndexingException("failed to index", ex, payload, table)
       }
   }

@@ -112,11 +112,12 @@ object ClickhouseSink extends LazyLogging {
   )(implicit ec: ExecutionContext, settings: QuerySettings): Future[String] =
     client
       .execute(statement.toSql)
-      .recover {
-        case ex =>
-          throw ClickhouseIndexingException(s"failed to optimize ${statement.table}",
-                                            ex,
-                                            Seq(statement.toSql),
-                                            statement.table)
+      .recover { case ex =>
+        throw ClickhouseIndexingException(
+          s"failed to optimize ${statement.table}",
+          ex,
+          Seq(statement.toSql),
+          statement.table
+        )
       }
 }
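For the sink reformatted above, a usage sketch: operations are grouped per table and flushed by batch size or interval, and `Optimize` turns into the `OPTIMIZE TABLE` statement built by `toSql`. The `events` table is hypothetical, and the `flush-interval` key name is assumed from the `flushInterval` value read next to `batch-size`:

```scala
import org.apache.pekko.Done
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.stream.scaladsl.Source
import com.typesafe.config.ConfigFactory
import com.crobox.clickhouse.ClickhouseClient
import com.crobox.clickhouse.stream.{ClickhouseSink, Insert, Optimize, TableOperation}
import scala.concurrent.Future

implicit val system: ActorSystem = ActorSystem("indexer-example")
import system.dispatcher

// Expects crobox.clickhouse.indexer.batch-size / flush-interval (assumed key
// name) / concurrent-requests to be present in the loaded config.
val config = ConfigFactory.load()
val client = new ClickhouseClient(Some(config))

val done: Future[Done] = Source(
  List[TableOperation](
    Insert("events", """{"id": 1}"""),
    Insert("events", """{"id": 2}"""),
    Optimize("events", partition = Some("202501")) // becomes an OPTIMIZE TABLE statement via toSql
  )
).runWith(ClickhouseSink.toSink(config, client))
```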
diff --git a/client/src/main/scala/com/crobox/clickhouse/time/Duration.scala b/client/src/main/scala/com/crobox/clickhouse/time/Duration.scala
index 234c2b43..3459616a 100644
--- a/client/src/main/scala/com/crobox/clickhouse/time/Duration.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/time/Duration.scala
@@ -7,10 +7,9 @@ sealed trait Duration {
 }

 /**
- * Parses a duration expressed in string to the corresponding value.
- * It accepts input in the format `{value}{label}` or `{label}`.
- * Examples: 1h, 2hours, 4day, day, month, 1M
- **/
+ * Parses a duration expressed in string to the corresponding value. It accepts input in the format `{value}{label}` or
+ * `{label}`. Examples: 1h, 2hours, 4day, day, month, 1M
+ */
 object Duration {
   private val DurationRegex = "(\\d+)?(\\D+)".r

@@ -34,8 +33,8 @@ object MultiDuration {
 }

 /**
-  * Special duration class that indicates the whole duration; similar to Duration.Inf
-  */
+ * Special duration class that indicates the whole duration; similar to Duration.Inf
+ */
 case object TotalDuration extends Duration {
   override val unit: TimeUnit = TimeUnit.Total
 }
diff --git a/client/src/main/scala/com/crobox/clickhouse/time/MultiInterval.scala b/client/src/main/scala/com/crobox/clickhouse/time/MultiInterval.scala
index cc6e7078..e5895b1f 100644
--- a/client/src/main/scala/com/crobox/clickhouse/time/MultiInterval.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/time/MultiInterval.scala
@@ -6,13 +6,15 @@ import org.joda.time.base.BaseInterval
 import org.joda.time.{DateTime, DateTimeConstants, DateTimeZone, Interval}

 /**
- * A multi interval is a interval that contains subintervals,
- * this is then used to select data by constaint, and groups/aggregates
- * this into subintervals in for example a query
+ * A multi interval is an interval that contains subintervals; this is then used to select data by constraint, and
+ * groups/aggregates this into subintervals in for example a query
  *
- * @param rawStart The starting time for the interval
- * @param rawEnd The ending time for the interval
- * @param duration The length/duration of the subintervals
+ * @param rawStart
+ *   The starting time for the interval
+ * @param rawEnd
+ *   The ending time for the interval
+ * @param duration
+ *   The length/duration of the subintervals
  */
 case class MultiInterval(rawStart: DateTime, rawEnd: DateTime, duration: Duration)
     extends BaseInterval(startFromDate(rawStart, duration), intervalsBetween(rawStart, rawEnd, duration).last.getEnd) {
@@ -30,7 +32,7 @@ object MultiInterval {
       case MultiDuration(value, Second) =>
         val ref = start.withMillisOfSecond(0)

-        val secs = ref.getMillis / 1000
+        val secs    = ref.getMillis / 1000
         val detSecs = secs - (secs % value)
         ref.withMillis(detSecs * 1000)

@@ -39,8 +41,7 @@ object MultiInterval {
           .withSecondOfMinute(0)
           .withMillisOfSecond(0)

-
-        val mins = ref.getMillis / Minute.standardMillis
+        val mins   = ref.getMillis / Minute.standardMillis
         val detMin = mins - (mins % value)
         ref.withMillis(detMin * Minute.standardMillis)

@@ -50,47 +51,47 @@ object MultiInterval {
           .withSecondOfMinute(0)
           .withMillisOfSecond(0)

-        val hours = ref.getMillis / Hour.standardMillis
+        val hours    = ref.getMillis / Hour.standardMillis
         val detHours = hours - (hours % value)
         ref.withMillis(detHours * Hour.standardMillis)

       case MultiDuration(value, Day) =>
-        val ref = start.withTimeAtStartOfDay()
+        val ref      = start.withTimeAtStartOfDay()
         val tzOffset = ref.getZone.getOffset(ref.withZone(DateTimeZone.UTC))

-        val days = (ref.getMillis + tzOffset) / Day.standardMillis
+        val days    = (ref.getMillis + tzOffset) / Day.standardMillis
         val detDays = days - (days % value)
         ref.withMillis((detDays * Day.standardMillis) - tzOffset)

       case MultiDuration(value, Week) =>
-        val ref = start.withTimeAtStartOfDay.withDayOfWeek(DateTimeConstants.MONDAY)
+        val ref      = start.withTimeAtStartOfDay.withDayOfWeek(DateTimeConstants.MONDAY)
         val tzOffset = ref.getZone.getOffset(ref.withZone(DateTimeZone.UTC))

-        //Week 1 (since epoch) starts at the 5th of January 1970, hence we subtract the 4 days of week 0
+        // Week 1 (since epoch) starts at the 5th of January 1970, hence we subtract the 4 days of week 0
         val msWeek1 = ref.getMillis - (Day.standardMillis * 4) + tzOffset

-        val weeks = msWeek1 / Week.standardMillis
+        val weeks    = msWeek1 / Week.standardMillis
         val detWeeks = weeks - (weeks % value)
         ref.withMillis((detWeeks * Week.standardMillis) + (Day.standardMillis * 4) - tzOffset)

       case MultiDuration(value, Month) =>
         val ref = start.withTimeAtStartOfDay.withDayOfMonth(1)

-        val months = (ref.getYear * 12) + ref.getMonthOfYear
+        val months       = (ref.getYear * 12) + ref.getMonthOfYear
         val detRelMonths = (months - 1) - (months % value)

         val detMonthOfYearZeroBased = detRelMonths % 12
-        val detYear = (detRelMonths - detMonthOfYearZeroBased) / 12
+        val detYear                 = (detRelMonths - detMonthOfYearZeroBased) / 12

         ref.withYear(detYear).withMonthOfYear(detMonthOfYearZeroBased + 1)

       case MultiDuration(value, Quarter) =>
         val ref = start.withTimeAtStartOfDay.withDayOfMonth(1)

-        val quarter = (ref.getYear * 4) + (ref.getMonthOfYear - 1) / 3
+        val quarter        = (ref.getYear * 4) + (ref.getMonthOfYear - 1) / 3
         val detRelQuarters = quarter - (quarter % value)

         val detQuarterOfYearZeroBased = detRelQuarters % 4
-        val detYear = (detRelQuarters - detQuarterOfYearZeroBased) / 4
+        val detYear                   = (detRelQuarters - detQuarterOfYearZeroBased) / 4

         ref.withYear(detYear).withMonthOfYear(detQuarterOfYearZeroBased * 3 + 1)

@@ -105,7 +106,6 @@ object MultiInterval {
       case d => throw new IllegalArgumentException(s"Invalid duration: $d")
     }

-
   private def endFromDate(date: DateTime, duration: Duration) =
     duration match {
       case TotalDuration =>
@@ -141,10 +141,10 @@ object MultiInterval {
           IndexedSeq(new Interval(start, end))
         case _ =>
           Iterator
-            .iterate(new Interval(startFromDate(start, duration), endFromDate(start, duration)))(interval => {
+            .iterate(new Interval(startFromDate(start, duration), endFromDate(start, duration))) { interval =>
               val intervalStart = nextStartFromDate(interval.getStart, duration)
               new Interval(intervalStart, endFromDate(intervalStart, duration))
-            })
+            }
             .takeWhile(_.getStart.isBefore(end))
             .toIndexedSeq
       }
diff --git a/client/src/main/scala/com/crobox/clickhouse/time/TimeUnit.scala b/client/src/main/scala/com/crobox/clickhouse/time/TimeUnit.scala
index a4fecc66..72ef245e 100644
--- a/client/src/main/scala/com/crobox/clickhouse/time/TimeUnit.scala
+++ b/client/src/main/scala/com/crobox/clickhouse/time/TimeUnit.scala
@@ -21,8 +21,7 @@ abstract class MultiTimeUnit(override val labels: Array[String], override val ma
 }

 /**
- * Represents a unit of time with a fixed length,
- * used in the multi interval functionality
+ * Represents a unit of time with a fixed length, used in the multi interval functionality
  */
 object TimeUnit {

@@ -53,20 +52,19 @@ object TimeUnit {
     override val asPeriod: Period = Period.months(1)
   }

-  case object Quarter extends MultiTimeUnit(Array("q", "quarter"), "quarter"){
+  case object Quarter extends MultiTimeUnit(Array("q", "quarter"), "quarter") {
     override val asPeriod: Period = Period.months(3)
   }

-  case object Year extends MultiTimeUnit(Array("y", "year"), "year"){
+  case object Year extends MultiTimeUnit(Array("y", "year"), "year") {
     override val asPeriod: Period = Period.years(1)
   }

   case object Total extends TimeUnit {
     override val labels: Array[String] = Array("t", "total")
-    override val mainLabel: String = "total"
+    override val mainLabel: String     = "total"
   }

-
   def lookup(label: String): TimeUnit = allUnits
     .find(_.labels.contains(label))
     .getOrElse(throw new IllegalArgumentException(s"Invalid label $label for time unit."))
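A sketch of the time package touched above, assuming `MultiDuration`'s `(value, unit)` shape from the pattern matches in `MultiInterval`; the timestamps are arbitrary:

```scala
import org.joda.time.DateTime
import com.crobox.clickhouse.time.{MultiDuration, MultiInterval, TimeUnit}

// TimeUnit.lookup resolves the label aliases documented above ("h", "hour", ...)
val hour: TimeUnit = TimeUnit.lookup("h")

// One day split into 15-minute sub-intervals; the interval start is snapped
// onto the duration grid by startFromDate above.
val interval = MultiInterval(
  new DateTime(2024, 1, 1, 0, 0),
  new DateTime(2024, 1, 2, 0, 0),
  MultiDuration(15, TimeUnit.Minute)
)
println(interval.getStart -> interval.getEnd) // bounds come from BaseInterval
```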
diff --git a/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientAsyncSpec.scala b/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientAsyncSpec.scala
index f1225f72..dcee2176 100644
--- a/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientAsyncSpec.scala
+++ b/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientAsyncSpec.scala
@@ -34,31 +34,28 @@ abstract class ClickhouseClientAsyncSpec(val config: Config = ConfigFactory.load
   def requestParallelHosts(balancer: HostBalancer, connections: Int = 10): Future[Seq[Uri]] =
     Future.sequence(
       (1 to connections)
-        .map(_ => {
-          balancer.nextHost
-        })
+        .map(_ => balancer.nextHost)
     )

   def getConnections(manager: ActorRef, connections: Int = 10): Future[Seq[Uri]] =
     Future.sequence(
       (1 to connections)
-        .map(_ => {
-          (manager ? GetConnection()).mapTo[Uri]
-        })
+        .map(_ => (manager ? GetConnection()).mapTo[Uri])
     )

   // TODO change this methods to custom matchers
   def returnsConnectionsInRoundRobinFashion(manager: ActorRef, expectedConnections: Set[Uri]): Future[Assertion] = {
     val RequestConnectionsPerHost = 100
     getConnections(manager, RequestConnectionsPerHost * expectedConnections.size)
-      .map(connections => {
-        expectedConnections.foreach(
-          uri =>
-            connections
-              .count(_ == uri) shouldBe (RequestConnectionsPerHost +- RequestConnectionsPerHost / 10) //10% delta for warm-up phase
+      .map { connections =>
+        expectedConnections.foreach(uri =>
+          connections
+            .count(
+              _ == uri
+            ) shouldBe (RequestConnectionsPerHost +- RequestConnectionsPerHost / 10) // 10% delta for warm-up phase
         )
         succeed
-      })
+      }
   }

   def compressGzip(content: String): Array[Byte] = {
diff --git a/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientSpec.scala b/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientSpec.scala
index 3f1ecfa1..efc7c61b 100644
--- a/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientSpec.scala
+++ b/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientSpec.scala
@@ -45,7 +45,7 @@ abstract class ClickhouseClientSpec(val config: Config = ConfigFactory.load())
     def ~%(percent: Int, base: Base = numeric.fromInt(5)): TripleEqualsSupport.Spread[T] = {
       import numeric._
-      value +- numeric.plus(base, numeric.fromInt(((value.toDouble / 100D) * percent).toInt))
+      value +- numeric.plus(base, numeric.fromInt(((value.toDouble / 100d) * percent).toInt))
     }
   }
 }
diff --git a/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientTest.scala b/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientTest.scala
index 1882ec74..54099396 100644
--- a/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientTest.scala
+++ b/client/src/test/scala/com/crobox/clickhouse/ClickhouseClientTest.scala
@@ -14,35 +14,35 @@ import javax.net.ssl.KeyManagerFactory
 import java.security.SecureRandom

 /**
- * @author Sjoerd Mulder
- * @author Leonard Wolters
+ * @author
+ *   Sjoerd Mulder
+ * @author
+ *   Leonard Wolters
  * @since 31-3-17
  */
 class ClickhouseClientTest extends ClickhouseClientAsyncSpec {

   val client: ClickhouseClient = new ClickhouseClient(Some(config))

-  it should "select" in {
+  it should "select" in
     client
       .query("select 1 + 2")
       .map { f =>
         f.trim.toInt should be(3)
       }
-      .flatMap(
-        _ =>
-          client.query("select currentDatabase()").map { f =>
-            f.trim should be("default")
+      .flatMap(_ =>
+        client.query("select currentDatabase()").map { f =>
+          f.trim should be("default")
         }
       )
-  }

   it should "support SSL certs" in {
     def createConnectionContext() = {
       val keyStoreResource = "../.docker/certs/keystore.jks"
-      val password = "password"
-
+      val password         = "password"
+
       val keyStore = KeyStore.getInstance("JKS")
-      val in = new FileInputStream(keyStoreResource)
+      val in       = new FileInputStream(keyStoreResource)
       keyStore.load(in, password.toCharArray)

       val keyManagerFactory = KeyManagerFactory.getInstance("SunX509")
@@ -57,18 +57,23 @@ class ClickhouseClientTest extends ClickhouseClientAsyncSpec {
     }

     new ClickhouseClient(
-      Some(config
-        .withValue("crobox.clickhouse.client.connection.port", ConfigValueFactory.fromAnyRef(8447))
-        .withValue("crobox.clickhouse.client.connection.host", ConfigValueFactory.fromAnyRef("https://clickhouseserver.test"))),
-      customConnectionContext = Some(createConnectionContext()))
+      Some(
+        config
+          .withValue("crobox.clickhouse.client.connection.port", ConfigValueFactory.fromAnyRef(8447))
+          .withValue(
+            "crobox.clickhouse.client.connection.host",
+            ConfigValueFactory.fromAnyRef("https://clickhouseserver.test")
+          )
+      ),
+      customConnectionContext = Some(createConnectionContext())
+    )
       .query("select 1 + 2")
       .map { f =>
         f.trim.toInt should be(3)
       }
-      .flatMap(
-        _ =>
-          client.query("select currentDatabase()").map { f =>
-            f.trim should be("default")
+      .flatMap(_ =>
+        client.query("select currentDatabase()").map { f =>
+          f.trim should be("default")
         }
       )
   }
@@ -89,39 +94,35 @@ class ClickhouseClientTest extends ClickhouseClientAsyncSpec {
     }
   }

-  it should "decline execute SELECT query" in {
-    client.execute("select 1 + 2").map(_ => fail()).recover {
-      case _: IllegalArgumentException => succeed
+  it should "decline execute SELECT query" in
+    client.execute("select 1 + 2").map(_ => fail()).recover { case _: IllegalArgumentException =>
+      succeed
     }
-  }

   // flaky test (not possible to rerun failed tasks in GitHub Actions. Therefore -for now- ignored this test
-  ignore should "publish query progress messages" in {
+  ignore should "publish query progress messages" in
     client
       .queryWithProgress("select 1 + 2")
       .runWith(Sink.seq[QueryProgress])
       .map(progress => progress should contain theSameElementsAs Seq(QueryAccepted, QueryFinished))
-  }

-  it should "materialize progress source with the query result" in {
+  it should "materialize progress source with the query result" in
     client
       .queryWithProgress("select 1 + 2")
       .toMat(Sink.ignore)(Keep.left)
       .run()
       .map(result => result.shouldBe("3\n"))
-  }

   // This test is failing using new clickhouse server; apparently too fast?
-  ignore should "send full progress messages" in {
+  ignore should "send full progress messages" in
     client
       .queryWithProgress("select sum(number) FROM (select number from system.numbers limit 100000000)")
       .runWith(Sink.seq[QueryProgress])
-      .map(progress => {
-        progress collect {
-          case qp: Progress => qp
+      .map(progress =>
+        progress collect { case qp: Progress =>
+          qp
         } should not be empty
-      })
-  }
+      )

   it should "parse server version" in {
     new ClickhouseClient(
diff --git a/client/src/test/scala/com/crobox/clickhouse/balancing/ConnectionManagerActorTest.scala b/client/src/test/scala/com/crobox/clickhouse/balancing/ConnectionManagerActorTest.scala
index d61d29a0..d7dfab96 100644
--- a/client/src/test/scala/com/crobox/clickhouse/balancing/ConnectionManagerActorTest.scala
+++ b/client/src/test/scala/com/crobox/clickhouse/balancing/ConnectionManagerActorTest.scala
@@ -42,7 +42,7 @@ class ConnectionManagerActorTest extends ClickhouseClientAsyncSpec with Eventual
     manager ! PoisonPill
   }

-  "Connection manager" should "remove connection with failed health check" in {
+  "Connection manager" should "remove connection with failed health check" in
     Future
       .sequence(
         Seq(
@@ -52,13 +52,10 @@ class ConnectionManagerActorTest extends ClickhouseClientAsyncSpec with Eventual
           uris(host3)._1.offer(ClickhouseHostHealth.Dead(host3, new IllegalArgumentException("Got it wrong")))
         )
       )
-      .flatMap(_ => {
+      .flatMap { _ =>
         manager ! Connections(uris.keySet)
-        ensureCompleted(uris).flatMap(_ => {
-          returnsConnectionsInRoundRobinFashion(manager, uris.keySet.-(host3))
-        })
-      })
-  }
+        ensureCompleted(uris).flatMap(_ => returnsConnectionsInRoundRobinFashion(manager, uris.keySet.-(host3)))
+      }

   it should "add back connection when health check passes" in {
     uris(host1)._1.offer(Alive(host1))
@@ -67,11 +64,11 @@ class ConnectionManagerActorTest extends ClickhouseClientAsyncSpec with Eventual
     manager ! Connections(uris.keySet)
     ensureCompleted(uris - host3)
       .flatMap(_ => returnsConnectionsInRoundRobinFashion(manager, uris.keySet.-(host3)))
-      .flatMap(_ => {
+      .flatMap { _ =>
         uris(host3)._1.offer(ClickhouseHostHealth.Alive(host3))
         ensureCompleted(uris.filter(_._1 == host3))
           .flatMap(_ => returnsConnectionsInRoundRobinFashion(manager, uris.keySet))
-      })
+      }
   }

   it should "cancel health source when connection is removed from configuration" in {
@@ -101,10 +98,14 @@ class ConnectionManagerActorTest extends ClickhouseClientAsyncSpec with Eventual
       system.actorOf(
         ConnectionManagerActor.props(
           uri => uris(uri)._2,
-          Some(config
-            .getConfig("crobox.clickhouse.client")
-            .withValue("connection.fallback-to-config-host-during-initialization", ConfigValueFactory.fromAnyRef(true))
-            .withValue("connection.host", ConfigValueFactory.fromAnyRef(host))
+          Some(
+            config
+              .getConfig("crobox.clickhouse.client")
+              .withValue(
+                "connection.fallback-to-config-host-during-initialization",
+                ConfigValueFactory.fromAnyRef(true)
+              )
+              .withValue("connection.host", ConfigValueFactory.fromAnyRef(host))
           )
         )
       )
@@ -123,29 +124,32 @@ class ConnectionManagerActorTest extends ClickhouseClientAsyncSpec with Eventual
   )

   private def statusesAsSource()
-    : (SourceQueueWithComplete[ClickhouseHostStatus], Source[ClickhouseHostStatus, Cancellable]) = {
+      : (SourceQueueWithComplete[ClickhouseHostStatus], Source[ClickhouseHostStatus, Cancellable]) = {
     val (queue, source) = Source
       .queue[ClickhouseHostStatus](10, OverflowStrategy.fail)
       .preMaterialize()
-    (queue, source.mapMaterializedValue(_ => {
-      new Cancellable {
-        override def cancel(): Boolean = {
-          queue.complete()
-          true
+    (
+      queue,
+      source.mapMaterializedValue(_ =>
+        new Cancellable {
+          override def cancel(): Boolean = {
+            queue.complete()
+            true
+          }
+          override def isCancelled: Boolean = queue.watchCompletion().isCompleted
         }
-        override def isCancelled: Boolean = queue.watchCompletion().isCompleted
-      }
-    }))
+      )
+    )
   }

   private def ensureCompleted(
       uris: Map[Uri, (SourceQueueWithComplete[ClickhouseHostStatus], Source[ClickhouseHostStatus, Cancellable])]
   ): Future[Iterable[Done]] =
-    Future.sequence(uris.values.map(queue => {
+    Future.sequence(uris.values.map { queue =>
       queue._1.complete()
       queue._1.watchCompletion()
-    }) ++ Seq(Future {
-      Thread.sleep(1000)//FIXME find a cleaner way to ensure the manager processes all the elements from the stream
+    } ++ Seq(Future {
+      Thread.sleep(1000) // FIXME find a cleaner way to ensure the manager processes all the elements from the stream
       Done
     }))
diff --git a/client/src/test/scala/com/crobox/clickhouse/balancing/HostBalancerTest.scala b/client/src/test/scala/com/crobox/clickhouse/balancing/HostBalancerTest.scala
index 780e3b07..1c6eb186 100644
--- a/client/src/test/scala/com/crobox/clickhouse/balancing/HostBalancerTest.scala
+++ b/client/src/test/scala/com/crobox/clickhouse/balancing/HostBalancerTest.scala
@@ -16,7 +16,10 @@ class HostBalancerTest extends ClickhouseClientSpec {
   }

   it should "resolve to multi host balancer" in {
-    HostBalancer(Some(ConfigFactory.parseString("""
+    HostBalancer(
+      Some(
+        ConfigFactory
+          .parseString("""
             | connection: {
             |   type: "balancing-hosts"
             |   hosts: [
@@ -31,14 +34,20 @@ class HostBalancerTest extends ClickhouseClientSpec {
             | }
             | }
             |
-          """.stripMargin).withFallback(config.getConfig("crobox.clickhouse.client")))) match {
+          """.stripMargin)
+          .withFallback(config.getConfig("crobox.clickhouse.client"))
+      )
+    ) match {
       case MultiHostBalancer(hosts, _) =>
         hosts.toSeq should contain theSameElementsInOrderAs Seq(ClickhouseHostBuilder.toHost("localhost", Some(8123)))
     }
   }

   it should "resolve to cluster aware host balancer" in {
-    HostBalancer(Some(ConfigFactory.parseString("""
+    HostBalancer(
+      Some(
+        ConfigFactory
+          .parseString("""
             | connection: {
             |   type: "cluster-aware"
             |   host: "localhost"
@@ -51,7 +60,10 @@ class HostBalancerTest extends ClickhouseClientSpec {
             | }
             | }
             |
-          """.stripMargin).withFallback(config.getConfig("crobox.clickhouse.client")))) match {
+          """.stripMargin)
+          .withFallback(config.getConfig("crobox.clickhouse.client"))
+      )
+    ) match {
       case ClusterAwareHostBalancer(host, cluster, _, builtTimeout) =>
         host shouldEqual ClickhouseHostBuilder.toHost("localhost", Some(8123))
         cluster shouldBe "cluster"
diff --git a/client/src/test/scala/com/crobox/clickhouse/balancing/SingleHostBalancerTest.scala b/client/src/test/scala/com/crobox/clickhouse/balancing/SingleHostBalancerTest.scala
index 27d14b74..a34ac097 100644
--- a/client/src/test/scala/com/crobox/clickhouse/balancing/SingleHostBalancerTest.scala
+++ b/client/src/test/scala/com/crobox/clickhouse/balancing/SingleHostBalancerTest.scala
@@ -11,9 +11,7 @@ class SingleHostBalancerTest extends ClickhouseClientAsyncSpec {
     val uri      = Uri("localhost").withPort(8123)
     val balancer = SingleHostBalancer(uri)
     val assertions = (1 to 10)
-      .map(_ => {
-        balancer.nextHost.map(_ shouldEqual uri)
-      })
+      .map(_ => balancer.nextHost.map(_ shouldEqual uri))
     Future.sequence(assertions).map(_ => succeed)
   }
diff --git a/client/src/test/scala/com/crobox/clickhouse/balancing/discovery/cluster/ClusterConnectionFlowTest.scala b/client/src/test/scala/com/crobox/clickhouse/balancing/discovery/cluster/ClusterConnectionFlowTest.scala
index 0fe52e6f..05ae2954 100644
--- a/client/src/test/scala/com/crobox/clickhouse/balancing/discovery/cluster/ClusterConnectionFlowTest.scala
+++ b/client/src/test/scala/com/crobox/clickhouse/balancing/discovery/cluster/ClusterConnectionFlowTest.scala
@@ -16,9 +16,7 @@ class ClusterConnectionFlowTest extends ClickhouseClientAsyncSpec {
       .clusterConnectionsFlow(Future.successful(clickhouseUri), 2.seconds, "test_shard_localhost")
       .toMat(Sink.head)(Keep.both)
       .run()
-    futureResult.map(result => {
-      result.hosts should contain only ClickhouseHostBuilder.toHost("127.0.0.1", Some(8123))
-    })
+    futureResult.map(result => result.hosts should contain only ClickhouseHostBuilder.toHost("127.0.0.1", Some(8123)))
   }

   it should "fail for non existing cluster" in {
@@ -27,11 +25,9 @@ class ClusterConnectionFlowTest extends ClickhouseClientAsyncSpec {
       .toMat(Sink.head)(Keep.both)
       .run()
     futureResult
-      .map(_ => {
-        fail("Returned answer for non
existing clsuter") - }) - .recover { - case _: IllegalArgumentException => succeed + .map(_ => fail("Returned answer for non existing clsuter")) + .recover { case _: IllegalArgumentException => + succeed } } diff --git a/client/src/test/scala/com/crobox/clickhouse/internal/ClickhouseExecutorTest.scala b/client/src/test/scala/com/crobox/clickhouse/internal/ClickhouseExecutorTest.scala index 2fc40bda..1d71eb7e 100644 --- a/client/src/test/scala/com/crobox/clickhouse/internal/ClickhouseExecutorTest.scala +++ b/client/src/test/scala/com/crobox/clickhouse/internal/ClickhouseExecutorTest.scala @@ -14,16 +14,16 @@ import com.typesafe.config.Config import scala.concurrent.{ExecutionContext, Future} class ClickhouseExecutorTest extends ClickhouseClientAsyncSpec { - private val balancingHosts = Seq(Uri("http://host1"), Uri("http://host2"), Uri("http://host3"), Uri("http://host4")) - private val hosts = new CircularIteratorSet(balancingHosts) - private lazy val self = this + private val balancingHosts = Seq(Uri("http://host1"), Uri("http://host2"), Uri("http://host3"), Uri("http://host4")) + private val hosts = new CircularIteratorSet(balancingHosts) + private lazy val self = this private var response: Uri => Future[String] = _ - private lazy val executor = { + private lazy val executor = new ClickHouseExecutor with ClickhouseResponseParser with ClickhouseQueryBuilder { override protected val customConnectionContext: Option[HttpsConnectionContext] = None - override protected implicit val system: ActorSystem = self.system - override protected implicit val executionContext: ExecutionContext = system.dispatcher - override protected val config: Config = self.config.getConfig("crobox.clickhouse.client") + override protected implicit val system: ActorSystem = self.system + override protected implicit val executionContext: ExecutionContext = system.dispatcher + override protected val config: Config = self.config.getConfig("crobox.clickhouse.client") override protected val hostBalancer: HostBalancer = new HostBalancer { override def nextHost: Future[Uri] = Future.successful(hosts.next()) } @@ -37,12 +37,11 @@ class ClickhouseExecutorTest extends ClickhouseClientAsyncSpec { QueryProgress ] ] - )( - implicit materializer: Materializer, + )(implicit + materializer: Materializer, executionContext: ExecutionContext ): Future[String] = response(host) } - } it should "retry all requests with stream tcp connection" in { val exception = new StreamTcpException("") @@ -50,11 +49,13 @@ class ClickhouseExecutorTest extends ClickhouseClientAsyncSpec { executor .executeRequestWithProgress("", QuerySettings(ReadQueries)) .runWith(Sink.seq[QueryProgress]) - .map(progress => { - progress should contain theSameElementsAs Seq(QueryRetry(exception, 1), - QueryRetry(exception, 2), - QueryRetry(exception, 3)) - }) + .map(progress => + progress should contain theSameElementsAs Seq( + QueryRetry(exception, 1), + QueryRetry(exception, 2), + QueryRetry(exception, 3) + ) + ) } it should "retry idempotent queries for all exceptions" in { @@ -63,11 +64,13 @@ class ClickhouseExecutorTest extends ClickhouseClientAsyncSpec { executor .executeRequestWithProgress("", QuerySettings(AllQueries, idempotent = Some(true))) .runWith(Sink.seq[QueryProgress]) - .map(progress => { - progress should contain theSameElementsAs Seq(QueryRetry(exception, 1), - QueryRetry(exception, 2), - QueryRetry(exception, 3)) - }) + .map(progress => + progress should contain theSameElementsAs Seq( + QueryRetry(exception, 1), + QueryRetry(exception, 2), + 
QueryRetry(exception, 3) + ) + ) } it should "not retry non idempotent queries for non connection exception" in { @@ -76,9 +79,7 @@ class ClickhouseExecutorTest extends ClickhouseClientAsyncSpec { executor .executeRequestWithProgress("", QuerySettings(AllQueries)) .runWith(Sink.seq[QueryProgress]) - .map(progress => { - progress should contain theSameElementsAs Seq() - }) + .map(progress => progress should contain theSameElementsAs Seq()) } it should "execute retries on the next balancer host" in { @@ -91,12 +92,14 @@ class ClickhouseExecutorTest extends ClickhouseClientAsyncSpec { executor .executeRequestWithProgress("", QuerySettings(AllQueries, idempotent = Some(true))) .runWith(Sink.seq[QueryProgress]) - .map(progress => { - progress should contain theSameElementsAs Seq(QueryRetry(exception, 1), - QueryRetry(exception, 2), - QueryRetry(exception, 3)) + .map { progress => + progress should contain theSameElementsAs Seq( + QueryRetry(exception, 1), + QueryRetry(exception, 2), + QueryRetry(exception, 3) + ) servedHosts should contain theSameElementsAs balancingHosts - }) + } } it should "not retry non retryable exceptions" in { @@ -105,8 +108,6 @@ class ClickhouseExecutorTest extends ClickhouseClientAsyncSpec { executor .executeRequestWithProgress("", QuerySettings(AllQueries)) .runWith(Sink.seq[QueryProgress]) - .map(progress => { - progress should contain theSameElementsAs Seq() - }) + .map(progress => progress should contain theSameElementsAs Seq()) } } diff --git a/client/src/test/scala/com/crobox/clickhouse/stream/ClickhouseIndexingSubscriberTest.scala b/client/src/test/scala/com/crobox/clickhouse/stream/ClickhouseIndexingSubscriberTest.scala index 36508209..4b6396cc 100644 --- a/client/src/test/scala/com/crobox/clickhouse/stream/ClickhouseIndexingSubscriberTest.scala +++ b/client/src/test/scala/com/crobox/clickhouse/stream/ClickhouseIndexingSubscriberTest.scala @@ -31,10 +31,13 @@ class ClickhouseIndexingSubscriberTest extends ClickhouseClientAsyncSpec with Sc override protected def beforeEach(): Unit = { super.beforeAll() - Await.ready(for { - _ <- client.execute(createDb) - create <- client.execute(createTable) - } yield create, timeout.duration) + Await.ready( + for { + _ <- client.execute(createDb) + create <- client.execute(createTable) + } yield create, + timeout.duration + ) subscriberCompletes = Promise[Unit]() } @@ -45,22 +48,21 @@ class ClickhouseIndexingSubscriberTest extends ClickhouseClientAsyncSpec with Sc Await.ready(client.execute(dropDb), timeout.duration) } - def unparsedInserts(key: String): Seq[Map[String, Any]] = (1 to 10).map( - _ => - Map( - "i" -> Random.nextInt(100), - "s" -> key, - "a" -> (1 to Random.nextInt(20)).map(_ => Random.nextInt(200)) + def unparsedInserts(key: String): Seq[Map[String, Any]] = (1 to 10).map(_ => + Map( + "i" -> Random.nextInt(100), + "s" -> key, + "a" -> (1 to Random.nextInt(20)).map(_ => Random.nextInt(200)) ) ) def parsedInserts(key: String): Seq[String] = unparsedInserts(key).map( _.view - .mapValues({ + .mapValues { case value: Int => value.toString case value: String => "\"" + value + "\"" case value: IndexedSeq[_] => "[" + value.mkString(", ") + "]" - }) + } .map { case (k, v) => s""""$k" : $v""" } .mkString(", ") ) @@ -87,8 +89,8 @@ class ClickhouseIndexingSubscriberTest extends ClickhouseClientAsyncSpec with Sc ClickhouseSink.optimizeTable(client, Optimize(table = "distributed"))(dispatcher, settings) statements.last should be("OPTIMIZE TABLE distributed FINAL") - ClickhouseSink.optimizeTable(client, Optimize(table = 
"distributed", localTable = Option("local")))(dispatcher, - settings) + ClickhouseSink + .optimizeTable(client, Optimize(table = "distributed", localTable = Option("local")))(dispatcher, settings) statements.last should be("OPTIMIZE TABLE local FINAL") ClickhouseSink.optimizeTable( @@ -97,11 +99,15 @@ class ClickhouseIndexingSubscriberTest extends ClickhouseClientAsyncSpec with Sc )(dispatcher, settings) statements.last should be("OPTIMIZE TABLE local ON CLUSTER cluster FINAL") - ClickhouseSink.optimizeTable(client, - Optimize(table = "distributed", - localTable = Option("local"), - cluster = Option("cluster"), - partition = Option("ID abc")))(dispatcher, settings) + ClickhouseSink.optimizeTable( + client, + Optimize( + table = "distributed", + localTable = Option("local"), + cluster = Option("cluster"), + partition = Option("ID abc") + ) + )(dispatcher, settings) statements.last should be("OPTIMIZE TABLE local ON CLUSTER cluster PARTITION ID abc FINAL") } diff --git a/client/src/test/scala/com/crobox/clickhouse/time/MultiIntervalTest.scala b/client/src/test/scala/com/crobox/clickhouse/time/MultiIntervalTest.scala index f7b3cab5..1efe29e1 100644 --- a/client/src/test/scala/com/crobox/clickhouse/time/MultiIntervalTest.scala +++ b/client/src/test/scala/com/crobox/clickhouse/time/MultiIntervalTest.scala @@ -22,93 +22,141 @@ class MultiIntervalTest extends AnyFlatSpecLike with Matchers with TableDrivenPr private val dateTime: DateTime = toDateTime(2014, 5, 8, 16, 26, 12, 123) - "Sub intervals" should "build sub intervals and start end for all time units" in { + "Sub intervals" should "build sub intervals and start end for all time units" in forAll( Table( ("Time Unit", "End interval function", "Expected intervals"), - (MultiDuration(1, TimeUnit.Second), + ( + MultiDuration(1, TimeUnit.Second), (time: DateTime) => time.plusSeconds(1), - IndexedSeq(toDateTime(2014, 5, 8, 16, 26, 12, 0) to - toDateTime(2014, 5, 8, 16, 26, 13, 0), + IndexedSeq( + toDateTime(2014, 5, 8, 16, 26, 12, 0) to + toDateTime(2014, 5, 8, 16, 26, 13, 0), toDateTime(2014, 5, 8, 16, 26, 13, 0) to - toDateTime(2014, 5, 8, 16, 26, 14, 0))), - (MultiDuration(1, TimeUnit.Minute), + toDateTime(2014, 5, 8, 16, 26, 14, 0) + ) + ), + ( + MultiDuration(1, TimeUnit.Minute), (time: DateTime) => time.plusMinutes(1), - IndexedSeq(toDateTime(2014, 5, 8, 16, 26, 0, 0) to - toDateTime(2014, 5, 8, 16, 27, 0, 0), + IndexedSeq( + toDateTime(2014, 5, 8, 16, 26, 0, 0) to + toDateTime(2014, 5, 8, 16, 27, 0, 0), toDateTime(2014, 5, 8, 16, 27, 0, 0) to - toDateTime(2014, 5, 8, 16, 28, 0, 0))), - (MultiDuration(1, TimeUnit.Hour), + toDateTime(2014, 5, 8, 16, 28, 0, 0) + ) + ), + ( + MultiDuration(1, TimeUnit.Hour), (time: DateTime) => time.plusHours(1), - IndexedSeq(toDateTime(2014, 5, 8, 16, 0, 0, 0) to - toDateTime(2014, 5, 8, 17, 0, 0, 0), + IndexedSeq( + toDateTime(2014, 5, 8, 16, 0, 0, 0) to + toDateTime(2014, 5, 8, 17, 0, 0, 0), toDateTime(2014, 5, 8, 17, 0, 0, 0) to - toDateTime(2014, 5, 8, 18, 0, 0, 0))), - (MultiDuration(6, TimeUnit.Hour), + toDateTime(2014, 5, 8, 18, 0, 0, 0) + ) + ), + ( + MultiDuration(6, TimeUnit.Hour), (time: DateTime) => time.plusHours(1), - IndexedSeq(toDateTime(2014, 5, 8, 12, 0, 0, 0) to - toDateTime(2014, 5, 8, 18, 0, 0, 0))), - (MultiDuration(6, TimeUnit.Hour), + IndexedSeq( + toDateTime(2014, 5, 8, 12, 0, 0, 0) to + toDateTime(2014, 5, 8, 18, 0, 0, 0) + ) + ), + ( + MultiDuration(6, TimeUnit.Hour), (time: DateTime) => time.plusHours(6), - IndexedSeq(toDateTime(2014, 5, 8, 12, 0, 0, 0) to - toDateTime(2014, 5, 8, 
18, 0, 0, 0), + IndexedSeq( + toDateTime(2014, 5, 8, 12, 0, 0, 0) to + toDateTime(2014, 5, 8, 18, 0, 0, 0), toDateTime(2014, 5, 8, 18, 0, 0, 0) to - toDateTime(2014, 5, 9, 0, 0, 0, 0))), - (MultiDuration(1, TimeUnit.Day), + toDateTime(2014, 5, 9, 0, 0, 0, 0) + ) + ), + ( + MultiDuration(1, TimeUnit.Day), (time: DateTime) => time.plusDays(1), - IndexedSeq(toDateTime(2014, 5, 8, 0, 0, 0, 0) to - toDateTime(2014, 5, 9, 0, 0, 0, 0), + IndexedSeq( + toDateTime(2014, 5, 8, 0, 0, 0, 0) to + toDateTime(2014, 5, 9, 0, 0, 0, 0), toDateTime(2014, 5, 9, 0, 0, 0, 0) to - toDateTime(2014, 5, 10, 0, 0, 0, 0))), - (MultiDuration(1, TimeUnit.Week), + toDateTime(2014, 5, 10, 0, 0, 0, 0) + ) + ), + ( + MultiDuration(1, TimeUnit.Week), (time: DateTime) => time.plusWeeks(1), - IndexedSeq(toDateTime(2014, 5, 5, 0, 0, 0, 0) to - toDateTime(2014, 5, 12, 0, 0, 0, 0), + IndexedSeq( + toDateTime(2014, 5, 5, 0, 0, 0, 0) to + toDateTime(2014, 5, 12, 0, 0, 0, 0), toDateTime(2014, 5, 12, 0, 0, 0, 0) to - toDateTime(2014, 5, 19, 0, 0, 0, 0))), - (MultiDuration(3, TimeUnit.Week), + toDateTime(2014, 5, 19, 0, 0, 0, 0) + ) + ), + ( + MultiDuration(3, TimeUnit.Week), (time: DateTime) => time.plusWeeks(1), - IndexedSeq(toDateTime(2014, 5, 5, 0, 0, 0, 0) to - toDateTime(2014, 5, 26, 0, 0, 0, 0))), - (MultiDuration(1, TimeUnit.Month), + IndexedSeq( + toDateTime(2014, 5, 5, 0, 0, 0, 0) to + toDateTime(2014, 5, 26, 0, 0, 0, 0) + ) + ), + ( + MultiDuration(1, TimeUnit.Month), (time: DateTime) => time.plusMonths(1), - IndexedSeq(toDateTime(2014, 5, 1, 0, 0, 0, 0) to - toDateTime(2014, 6, 1, 0, 0, 0, 0), + IndexedSeq( + toDateTime(2014, 5, 1, 0, 0, 0, 0) to + toDateTime(2014, 6, 1, 0, 0, 0, 0), toDateTime(2014, 6, 1, 0, 0, 0, 0) to - toDateTime(2014, 7, 1, 0, 0, 0, 0))), - (MultiDuration(2, TimeUnit.Month), + toDateTime(2014, 7, 1, 0, 0, 0, 0) + ) + ), + ( + MultiDuration(2, TimeUnit.Month), (time: DateTime) => time.plusMonths(1), - IndexedSeq(toDateTime(2014, 4, 1, 0, 0, 0, 0) to - toDateTime(2014, 6, 1, 0, 0, 0, 0), + IndexedSeq( + toDateTime(2014, 4, 1, 0, 0, 0, 0) to + toDateTime(2014, 6, 1, 0, 0, 0, 0), toDateTime(2014, 6, 1, 0, 0, 0, 0) to - toDateTime(2014, 8, 1, 0, 0, 0, 0))), - (MultiDuration(TimeUnit.Quarter), + toDateTime(2014, 8, 1, 0, 0, 0, 0) + ) + ), + ( + MultiDuration(TimeUnit.Quarter), (time: DateTime) => time.plusMonths(3), - IndexedSeq(toDateTime(2014, 4, 1, 0, 0, 0, 0) to - toDateTime(2014, 7, 1, 0, 0, 0, 0), + IndexedSeq( + toDateTime(2014, 4, 1, 0, 0, 0, 0) to + toDateTime(2014, 7, 1, 0, 0, 0, 0), toDateTime(2014, 7, 1, 0, 0, 0, 0) to - toDateTime(2014, 10, 1, 0, 0, 0, 0))), - (MultiDuration(TimeUnit.Quarter), + toDateTime(2014, 10, 1, 0, 0, 0, 0) + ) + ), + ( + MultiDuration(TimeUnit.Quarter), (time: DateTime) => time.plusMonths(1), - IndexedSeq(toDateTime(2014, 4, 1, 0, 0, 0, 0) to - toDateTime(2014, 7, 1, 0,0, 0, 0))), - (MultiDuration(TimeUnit.Year), + IndexedSeq( + toDateTime(2014, 4, 1, 0, 0, 0, 0) to + toDateTime(2014, 7, 1, 0, 0, 0, 0) + ) + ), + ( + MultiDuration(TimeUnit.Year), (time: DateTime) => time.plusMonths(3), - IndexedSeq(toDateTime(2014, 1, 1, 0, 0, 0, 0) to - toDateTime(2015, 1, 1, 0, 0, 0, 0))), - (TotalDuration, - (time: DateTime) => time.plusMonths(3), - IndexedSeq(dateTime to dateTime.plusMonths(3))) + IndexedSeq( + toDateTime(2014, 1, 1, 0, 0, 0, 0) to + toDateTime(2015, 1, 1, 0, 0, 0, 0) + ) + ), + (TotalDuration, (time: DateTime) => time.plusMonths(3), IndexedSeq(dateTime to dateTime.plusMonths(3))) ) - ) { (duration, intervalEnd, intervals) => { - val interval = 
MultiInterval(dateTime, intervalEnd(dateTime), duration) - interval.getStart should be(intervals.head.getStart) - interval.getEnd should be(intervals.last.getEnd) - interval.subIntervals should contain theSameElementsInOrderAs intervals + ) { (duration, intervalEnd, intervals) => + val interval = MultiInterval(dateTime, intervalEnd(dateTime), duration) + interval.getStart should be(intervals.head.getStart) + interval.getEnd should be(intervals.last.getEnd) + interval.subIntervals should contain theSameElementsInOrderAs intervals - } } - } it should "build correctly full time interval" in { val start = DateTime.now().withTimeAtStartOfDay() @@ -120,16 +168,14 @@ class MultiIntervalTest extends AnyFlatSpecLike with Matchers with TableDrivenPr } it should "include sub interval for which start date is equal to expected interval end date" in { - val startDate = toDateTime(2012, 1, 1, 1, 1, 9, 999) - val endDate = toDateTime(2012, 1, 1, 1, 1, 14, 999) - val interval = MultiInterval(startDate, endDate, MultiDuration(5, TimeUnit.Second)) + val startDate = toDateTime(2012, 1, 1, 1, 1, 9, 999) + val endDate = toDateTime(2012, 1, 1, 1, 1, 14, 999) + val interval = MultiInterval(startDate, endDate, MultiDuration(5, TimeUnit.Second)) val startExpected = startDate.withMillisOfSecond(0).withSecondOfMinute(5) interval.getStart should be(startExpected) interval.getEnd should be(startExpected.plusSeconds(10)) - interval - .subIntervals should contain theSameElementsInOrderAs ( - (startExpected to startExpected.plusSeconds(5)) :: - (startExpected.plusSeconds(5) to startExpected.plusSeconds(10)) :: Nil) + interval.subIntervals should contain theSameElementsInOrderAs ((startExpected to startExpected.plusSeconds(5)) :: + (startExpected.plusSeconds(5) to startExpected.plusSeconds(10)) :: Nil) } -} \ No newline at end of file +} diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/ClickhouseTimeSeriesIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/ClickhouseTimeSeriesIT.scala index d96e04f8..8648cde2 100644 --- a/dsl/src/it/scala/com/crobox/clickhouse/dsl/ClickhouseTimeSeriesIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/ClickhouseTimeSeriesIT.scala @@ -22,17 +22,17 @@ class ClickhouseTimeSeriesIT extends DslITSpec with TableDrivenPropertyChecks { } implicit val clickhouseClient: ClickhouseClient = clickClient - val startInterval = DateTime.parse("2019-03-01").withTimeAtStartOfDay().withZone(DateTimeZone.UTC) - val secondsId = UUID.randomUUID() - val dayId = UUID.randomUUID() - val minutesId = UUID.randomUUID() + val startInterval = DateTime.parse("2019-03-01").withTimeAtStartOfDay().withZone(DateTimeZone.UTC) + val secondsId = UUID.randomUUID() + val dayId = UUID.randomUUID() + val minutesId = UUID.randomUUID() private val numberOfGeneratedEntries: Int = 60 * 60 * 5 private val numberOfGeneratedEntriesForDay: Int = Days.daysBetween(startInterval, startInterval.plusYears(5)).getDays - val secondAndMinuteEntries: Seq[Table1Entry] = (0 until numberOfGeneratedEntries).flatMap(diff => { + val secondAndMinuteEntries: Seq[Table1Entry] = (0 until numberOfGeneratedEntries).flatMap(diff => Table1Entry(secondsId, startInterval.plusSeconds(diff)) :: - Table1Entry(minutesId, startInterval.plusMinutes(diff)) :: Nil - }) ++ (0 until numberOfGeneratedEntriesForDay).map(day => Table1Entry(dayId, startInterval.plusDays(day))) + Table1Entry(minutesId, startInterval.plusMinutes(diff)) :: Nil + ) ++ (0 until numberOfGeneratedEntriesForDay).map(day => Table1Entry(dayId, startInterval.plusDays(day))) override val 
table1Entries: Seq[Table1Entry] = secondAndMinuteEntries val alias = new TableColumn[Long]("time") {} @@ -42,8 +42,8 @@ class ClickhouseTimeSeriesIT extends DslITSpec with TableDrivenPropertyChecks { val lastDayIntervalDate = startInterval.plusDays(numberOfGeneratedEntriesForDay) "Grouping on total" should "return full result" in { - val modifiedStartInterval = startInterval.minus(12416) - val multiInterval = MultiInterval(modifiedStartInterval, lastSecondEntryDate, TotalDuration) + val modifiedStartInterval = startInterval.minus(12416) + val multiInterval = MultiInterval(modifiedStartInterval, lastSecondEntryDate, TotalDuration) val results: Future[QueryResult[CustomResult]] = getEntries(multiInterval, secondsId) val rows = results.futureValue.rows rows.size should be(1) @@ -51,84 +51,78 @@ class ClickhouseTimeSeriesIT extends DslITSpec with TableDrivenPropertyChecks { rows.head.shields.toInt should be(numberOfGeneratedEntries) } - "Grouping on second" should "return every appropriate interval" in { + "Grouping on second" should "return every appropriate interval" in forAll(Table("Second", 1, 2, 3, 5, 10, 15, 20, 30)) { duration => - val multiInterval = MultiInterval(startInterval, lastSecondEntryDate, MultiDuration(duration, TimeUnit.Second)) + val multiInterval = MultiInterval(startInterval, lastSecondEntryDate, MultiDuration(duration, TimeUnit.Second)) val results: Future[QueryResult[CustomResult]] = getEntries(multiInterval, secondsId) - val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) - val rows = results.futureValue.rows + val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) + val rows = results.futureValue.rows validateFullRows(rows, duration) rows.map(_.time) should contain theSameElementsInOrderAs expectedIntervalStarts } - } - - "Grouping on minutes" should "return appropriate intervals" in { + "Grouping on minutes" should "return appropriate intervals" in forAll(Table("Minute", 1, 2, 3, 5, 10, 15, 20, 30, 90)) { duration => val expectedEntriesPerMinutes = 60 - val multiInterval = MultiInterval(startInterval, lastSecondEntryDate, MultiDuration(duration, TimeUnit.Minute)) + val multiInterval = MultiInterval(startInterval, lastSecondEntryDate, MultiDuration(duration, TimeUnit.Minute)) forAll(Table("Timezone", multiInterval, shiftedTz(multiInterval))) { multiInterval => val results: Future[QueryResult[CustomResult]] = getEntries(multiInterval, secondsId) - val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) - val rows = results.futureValue.rows + val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) + val rows = results.futureValue.rows validateFullRows(rows, expectedEntriesPerMinutes * duration) rows.map(_.time) should contain theSameElementsInOrderAs expectedIntervalStarts } } - } - "Grouping on hours" should "properly group interval" in { + "Grouping on hours" should "properly group interval" in forAll(Table("Hour", 1, 2, 3, 4, 6, 8, 12)) { duration => val expectedEntriesPerHour = 60 - val multiInterval = MultiInterval(startInterval, lastMinuteIntervalDate, MultiDuration(duration, TimeUnit.Hour)) + val multiInterval = MultiInterval(startInterval, lastMinuteIntervalDate, MultiDuration(duration, TimeUnit.Hour)) forAll(Table("Timezone", multiInterval, shiftedTz(multiInterval))) { multiInterval => val results: Future[QueryResult[CustomResult]] = getEntries(multiInterval, minutesId) - val 
expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) - val rows = results.futureValue.rows - val expectedCountInFullInterval = expectedEntriesPerHour * duration + val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) + val rows = results.futureValue.rows + val expectedCountInFullInterval = expectedEntriesPerHour * duration validateFullRows(rows, expectedCountInFullInterval) rows.map(_.time) should contain theSameElementsInOrderAs expectedIntervalStarts } } - } - "Grouping on days" should "properly group intervals" in { + "Grouping on days" should "properly group intervals" in forAll(Table("Day", 1, 2, 6, 7, 12, 15)) { duration => val expectedEntriesPerDay = 1440 - val multiInterval = MultiInterval(startInterval, lastMinuteIntervalDate, MultiDuration(duration, TimeUnit.Day)) + val multiInterval = MultiInterval(startInterval, lastMinuteIntervalDate, MultiDuration(duration, TimeUnit.Day)) forAll(Table("Timezone", multiInterval, shiftedTz(multiInterval))) { multiInterval => val results: Future[QueryResult[CustomResult]] = getEntries(multiInterval, minutesId) - val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) - val rows = results.futureValue.rows - val expectedCountInFullInterval = expectedEntriesPerDay * duration + val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) + val rows = results.futureValue.rows + val expectedCountInFullInterval = expectedEntriesPerDay * duration validateFullRows(rows, expectedCountInFullInterval) rows.map(_.time) should contain theSameElementsInOrderAs expectedIntervalStarts } } - } - "Grouping on weeks" should "properly group intervals" in { + "Grouping on weeks" should "properly group intervals" in forAll(Table("Week", 1, 2, 3, 4)) { duration => val multiInterval = MultiInterval(startInterval, lastDayIntervalDate, MultiDuration(duration, TimeUnit.Week)) forAll(Table("Timezone", multiInterval, shiftedTz(multiInterval))) { multiInterval => val results: Future[QueryResult[CustomResult]] = getEntries(multiInterval, dayId) - val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) - val rows = results.futureValue.rows - val expectedCountInFullInterval = 7 * duration + val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) + val rows = results.futureValue.rows + val expectedCountInFullInterval = 7 * duration validateFullRows(rows, expectedCountInFullInterval) rows.map(_.time) should contain theSameElementsInOrderAs expectedIntervalStarts } } - } - "Grouping on months" should "properly group interval" in { + "Grouping on months" should "properly group interval" in forAll(Table("Months", 1, 2, 3, 7)) { duration => val multiInterval = MultiInterval(startInterval, lastDayIntervalDate, MultiDuration(duration, TimeUnit.Month)) forAll(Table("Timezone", multiInterval, shiftedTz(multiInterval))) { multiInterval => val results: Future[QueryResult[CustomResult]] = getEntries(multiInterval, dayId) - var expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) - val rows = results.futureValue.rows - val expectedCountInFullInterval = duration * 30 +- duration * 3 + var expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) + val rows = results.futureValue.rows + val expectedCountInFullInterval = duration * 30 +- duration * 3 
validateFullRows(rows, expectedCountInFullInterval) // fix flaky tests... @@ -140,36 +134,33 @@ class ClickhouseTimeSeriesIT extends DslITSpec with TableDrivenPropertyChecks { rows.map(_.time) should contain theSameElementsInOrderAs expectedIntervalStarts } } - } - "Grouping on quarters" should "properly group intervals" in { + "Grouping on quarters" should "properly group intervals" in forAll(Table("Quarters", 1, 2, 3, 4)) { duration => val multiInterval = MultiInterval(startInterval, lastDayIntervalDate, MultiDuration(duration, TimeUnit.Quarter)) forAll(Table("Timezone", multiInterval, shiftedTz(multiInterval))) { multiInterval => val results: Future[QueryResult[CustomResult]] = getEntries(multiInterval, dayId) - val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) - val rows = results.futureValue.rows - val expectedCountInFullInterval = duration * 90 +- duration * 3 + val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) + val rows = results.futureValue.rows + val expectedCountInFullInterval = duration * 90 +- duration * 3 validateFullRows(rows, expectedCountInFullInterval) rows.map(_.time) should contain theSameElementsInOrderAs expectedIntervalStarts } } - } - "Grouping on years" should "properly group intervals" in { + "Grouping on years" should "properly group intervals" in forAll(Table("Years", 1, 2, 3, 4)) { duration => val multiInterval = MultiInterval(startInterval, lastDayIntervalDate, MultiDuration(duration, TimeUnit.Year)) forAll(Table("Timezone", multiInterval, shiftedTz(multiInterval))) { multiInterval => val results: Future[QueryResult[CustomResult]] = getEntries(multiInterval, dayId) - val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) - val rows = results.futureValue.rows - val expectedCountInFullInterval = duration * 365 +- duration * 4 + val expectedIntervalStarts = multiInterval.subIntervals.map(_.getStart.withZone(DateTimeZone.UTC)) + val rows = results.futureValue.rows + val expectedCountInFullInterval = duration * 365 +- duration * 4 validateFullRows(rows, expectedCountInFullInterval) rows.map(_.time) should contain theSameElementsInOrderAs expectedIntervalStarts } } - } private def getEntries(multiInterval: MultiInterval, entriesId: UUID) = queryExecutor.execute[CustomResult]( @@ -180,13 +171,15 @@ class ClickhouseTimeSeriesIT extends DslITSpec with TableDrivenPropertyChecks { .where(shieldId isEq entriesId) ) - private def validateFullRows(rows: Seq[CustomResult], - expectedCountInFullInterval: TripleEqualsSupport.Spread[Int]): Unit = - //drop first and last as they might not be full intervals + private def validateFullRows( + rows: Seq[CustomResult], + expectedCountInFullInterval: TripleEqualsSupport.Spread[Int] + ): Unit = + // drop first and last as they might not be full intervals rows.drop(1).dropRight(1).foreach(row => row.shields.toInt should be(expectedCountInFullInterval)) private def validateFullRows(rows: Seq[CustomResult], expectedCountInFullInterval: Int): Unit = - //drop first and last as they might not be full intervals + // drop first and last as they might not be full intervals rows.drop(1).dropRight(1).foreach(row => row.shields.toInt should be(expectedCountInFullInterval)) private def shiftedTz(intv: MultiInterval): MultiInterval = diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/JoinQueryIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/JoinQueryIT.scala index 5e1373c5..3db2d225 100644 --- 
a/dsl/src/it/scala/com/crobox/clickhouse/dsl/JoinQueryIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/JoinQueryIT.scala @@ -37,7 +37,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks { (JoinQuery.AntiLeftJoin, 0, 20), (JoinQuery.AntiRightJoin, 0, 20), (JoinQuery.SemiLeftJoin, 0, 20), - (JoinQuery.SemiRightJoin, 0, 20), + (JoinQuery.SemiRightJoin, 0, 20) ) ) { (joinType, result, minClickhouseVersion) => it should s"join correctly on: $joinType" in { @@ -45,16 +45,15 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks { // TABLE -- TABLE var query: OperationalQuery = - select(shieldId as itemId) - .from(OneTestTable) - .where(notEmpty(itemId)) - .join(joinType, TwoTestTable) using itemId + select(shieldId as itemId) + .from(OneTestTable) + .where(notEmpty(itemId)) + .join(joinType, TwoTestTable) using itemId var resultRows = queryExecutor.execute[StringResult](query).futureValue.rows resultRows.length shouldBe result // TABLE -- QUERY - query = - select(shieldId as itemId) + query = select(shieldId as itemId) .from(OneTestTable) .where(notEmpty(itemId)) .join(joinType, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) using itemId @@ -62,8 +61,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks { resultRows.length shouldBe result // QUERY -- TABLE - query = - select(dsl.all) + query = select(dsl.all) .from( select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId)) ) @@ -73,8 +71,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks { resultRows.length shouldBe result // QUERY -- QUERY - query = - select(dsl.all) + query = select(dsl.all) .from(select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId))) .join(joinType, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) using itemId resultRows = queryExecutor.execute[StringResult](query).futureValue.rows @@ -86,7 +83,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks { Table( ("joinType", "result"), (JoinQuery.AsOfJoin, 0), - (JoinQuery.AsOfLeftJoin, 0), + (JoinQuery.AsOfLeftJoin, 0) ) ) { (joinType, result) => it should s"join correctly on: $joinType" in { diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/QueryIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/QueryIT.scala index 154aa428..9122d251 100644 --- a/dsl/src/it/scala/com/crobox/clickhouse/dsl/QueryIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/QueryIT.scala @@ -24,7 +24,10 @@ class QueryIT extends DslITSpec { implicit val resultFormat: RootJsonFormat[Result] = jsonFormat[String, Int, Result](Result.apply, "column_1", "empty") val results: Future[QueryResult[Result]] = queryExecutor.execute[Result]( - select(shieldId as itemId, col1, notEmpty(col1) as "empty") from OneTestTable join (InnerJoin, TwoTestTable) using itemId + select(shieldId as itemId, col1, notEmpty(col1) as "empty") from OneTestTable join ( + InnerJoin, + TwoTestTable + ) using itemId ) results.futureValue.rows.map(_.columnResult) should be(table2Entries.map(_.firstColumn)) results.futureValue.rows.map(_.empty).head should be(1) diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/AggregationFunctionsIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/AggregationFunctionsIT.scala index 99648966..ce167588 100644 --- a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/AggregationFunctionsIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/AggregationFunctionsIT.scala @@ -15,9 +15,8 @@ class 
AggregationFunctionsIT extends DslITSpec with LazyLogging { private val delta = 2 override val table1Entries: Seq[Table1Entry] = Seq.fill(entries)(Table1Entry(UUID.randomUUID(), numbers = Seq(1, 2, 3))) - override val table2Entries: Seq[Table2Entry] = { + override val table2Entries: Seq[Table2Entry] = (1 to entries).map(i => Table2Entry(UUID.randomUUID(), randomString, i, randomString, None)) - } "Combinators" should "apply for aggregations" in { case class Result(columnResult: String) { @@ -40,7 +39,7 @@ class AggregationFunctionsIT extends DslITSpec with LazyLogging { implicit val resultFormat: RootJsonFormat[Result] = jsonFormat[Seq[Float], Result](Result.apply, "result") val result = queryExecutor .execute[Result]( - select(quantiles(col2, 0.1F, 0.2F, 0.3F, 0.4F, 0.5F, 0.99F) as ref[Seq[Float]]("result")) from TwoTestTable + select(quantiles(col2, 0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.99f) as ref[Seq[Float]]("result")) from TwoTestTable ) .futureValue result.rows.head.result should have length 6 diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/ArrayFunctionsIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/ArrayFunctionsIT.scala index c235f75a..0aa9e6d3 100644 --- a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/ArrayFunctionsIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/ArrayFunctionsIT.scala @@ -47,7 +47,9 @@ class ArrayFunctionsIT extends DslITSpec { it should "arrayFunction: intersect" in { execute(select(arrayIntersect(Array(1, 2), Array(1, 3), Array(2, 3)))).futureValue should be("[]") execute(select(arrayIntersect(Array(1, 2), Array(1, 3), Array(1, 4)))).futureValue should be("[1]") - execute(select(arraySort(None, arrayIntersect(Array(1, 2, 3), Array(1, 3, 4), Array(1, 3, 5))))).futureValue should be("[1,3]") + execute( + select(arraySort(None, arrayIntersect(Array(1, 2, 3), Array(1, 3, 4), Array(1, 3, 5)))) + ).futureValue should be("[1,3]") } it should "arrayFunction: reduce" in { diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/ComparisonFunctionsIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/ComparisonFunctionsIT.scala index 91389322..39b12724 100644 --- a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/ComparisonFunctionsIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/ComparisonFunctionsIT.scala @@ -13,11 +13,11 @@ class ComparisonFunctionsIT extends DslITSpec { r(someNum >= 3) shouldBe "1" r(someNum <= 3) shouldBe "0" r(someNum isEq 3) shouldBe "0" - r(notEquals(1,2)) shouldBe "1" - r(isEqual(2L,2)) shouldBe "1" - r(less(1.0,200)) shouldBe "1" - r(greater(1L,2L)) shouldBe "0" - r(lessOrEquals(1,2)) shouldBe "1" - r(greaterOrEquals(1,2)) shouldBe "0" + r(notEquals(1, 2)) shouldBe "1" + r(isEqual(2L, 2)) shouldBe "1" + r(less(1.0, 200)) shouldBe "1" + r(greater(1L, 2L)) shouldBe "0" + r(lessOrEquals(1, 2)) shouldBe "1" + r(greaterOrEquals(1, 2)) shouldBe "0" } -} \ No newline at end of file +} diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/DateTimeFunctionsIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/DateTimeFunctionsIT.scala index abedcad0..d2120103 100644 --- a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/DateTimeFunctionsIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/DateTimeFunctionsIT.scala @@ -52,7 +52,9 @@ class DateTimeFunctionsIT extends DslITSpec { r(chYesterday()) shouldBe dynNow.minusDays(1).printAsDate r(chToday()) shouldBe dynNow.withTimeAtStartOfDay().printAsDate r(timeSlot(now)) shouldBe 
now.toStartOfMin(30).printAsDateTime - r(timeSlots(now, toUInt32(1800))) shouldBe s"['${now.toStartOfMin(30).printAsDateTime}','${now.plusMinutes(30).toStartOfMin(30).printAsDateTime}']" + r( + timeSlots(now, toUInt32(1800)) + ) shouldBe s"['${now.toStartOfMin(30).printAsDateTime}','${now.plusMinutes(30).toStartOfMin(30).printAsDateTime}']" r(toISOWeek(August_8_19)) shouldBe "32" r(toISOYear(August_8_19)) shouldBe "2019" r(toWeek(August_8_19)) shouldBe "31" diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/HashFunctionsIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/HashFunctionsIT.scala index 926a9e01..f4e51f48 100644 --- a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/HashFunctionsIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/HashFunctionsIT.scala @@ -8,7 +8,7 @@ class HashFunctionsIT extends DslITSpec { it should "succeed for HashFunctions" in { val someStringData = "fooBarBaz" - //TODO these also return the byte format, can we more properly test them? + // TODO these also return the byte format, can we more properly test them? r(halfMD5(someStringData)) shouldBe "14009637059544572277" r(mD5(someStringData)).nonEmpty shouldBe true r(sipHash64(someStringData)).nonEmpty shouldBe true diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/HigherOrderFunctionsIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/HigherOrderFunctionsIT.scala index 46060b88..b45fefcf 100644 --- a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/HigherOrderFunctionsIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/HigherOrderFunctionsIT.scala @@ -55,7 +55,9 @@ class HigherOrderFunctionsIT extends DslITSpec { r(arrayFilter[Long](_ < 0L, arr1)) shouldBe "[]" r(arrayFilter[String](_.like("%World%"), Seq("Hello", "World"))) shouldBe "['World']" - r(arrayFilter2[String]((x, y) => x.concat(y).like("%World"), Seq("Hello", "World"), Seq("Sjoerd", "Leonard"))) shouldBe "[]" + r( + arrayFilter2[String]((x, y) => x.concat(y).like("%World"), Seq("Hello", "World"), Seq("Sjoerd", "Leonard")) + ) shouldBe "[]" } it should "HigherOrderFunctions: arrayFirst" in { @@ -108,7 +110,9 @@ class HigherOrderFunctionsIT extends DslITSpec { } it should "HigherOrderFunctions: arrayReverseSplit" in { - r(arrayReverseSplit[Int]((x, y) => y.notEq(0), Iterable(1, 2, 3, 4, 5), Iterable(1, 0, 0, 1, 0))) shouldBe "[[1],[2,3,4],[5]]" + r( + arrayReverseSplit[Int]((x, y) => y.notEq(0), Iterable(1, 2, 3, 4, 5), Iterable(1, 0, 0, 1, 0)) + ) shouldBe "[[1],[2,3,4],[5]]" } it should "HigherOrderFunctions: arraySort" in { @@ -118,7 +122,9 @@ class HigherOrderFunctionsIT extends DslITSpec { } it should "HigherOrderFunctions: arraySplit" in { - r(arraySplit[Int]((x, y) => y.notEq(0), Iterable(1, 2, 3, 4, 5), Iterable(1, 0, 0, 1, 0))) shouldBe "[[1,2,3],[4,5]]" + r( + arraySplit[Int]((x, y) => y.notEq(0), Iterable(1, 2, 3, 4, 5), Iterable(1, 0, 0, 1, 0)) + ) shouldBe "[[1,2,3],[4,5]]" } it should "HigherOrderFunctions: arraySum" in { diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/MiscellaneousFunctionsIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/MiscellaneousFunctionsIT.scala index c7d5e241..32f4415a 100644 --- a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/MiscellaneousFunctionsIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/MiscellaneousFunctionsIT.scala @@ -1,7 +1,7 @@ package com.crobox.clickhouse.dsl.column import com.crobox.clickhouse.dsl._ -import com.crobox.clickhouse.{DslITSpec, dsl => CHDsl} +import 
com.crobox.clickhouse.{dsl => CHDsl, DslITSpec} class MiscellaneousFunctionsIT extends DslITSpec { diff --git a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/NumericColFunctionIT.scala b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/NumericColFunctionIT.scala index 59c9d3d4..052b849e 100644 --- a/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/NumericColFunctionIT.scala +++ b/dsl/src/it/scala/com/crobox/clickhouse/dsl/column/NumericColFunctionIT.scala @@ -1,7 +1,7 @@ package com.crobox.clickhouse.dsl.column import com.crobox.clickhouse.dsl._ -import com.crobox.clickhouse.{DslITSpec, dsl => CHDsl} +import com.crobox.clickhouse.{dsl => CHDsl, DslITSpec} class NumericColFunctionIT extends DslITSpec { @@ -77,17 +77,17 @@ class NumericColFunctionIT extends DslITSpec { r(log10(123)) should startWith("2.0899") r(sqrt(123)) should startWith("11.090") r(cbrt(123)) should startWith("4.9731") - r(erf(123)) shouldBe ("1") - r(erfc(123)) shouldBe ("0") + r(erf(123)) shouldBe "1" + r(erfc(123)) shouldBe "0" r(lgamma(123)) should startWith("467.41") r(tgamma(123)) should startWith("9.8750") r(sin(123)) should startWith("-0.45990") r(cos(123)) should startWith("-0.88796") r(tan(123)) should startWith("0.51792747") r(asin(1)) should startWith("1.5707") - r(acos(1)) shouldBe ("0") + r(acos(1)) shouldBe "0" r(atan(1)) should startWith("0.78539") - r(pow(123, 2)) shouldBe ("15129") + r(pow(123, 2)) shouldBe "15129" } it should "succeed for RandomFunctions" in { diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/ClickhouseStatement.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/ClickhouseStatement.scala index b7b485fc..3f22a238 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/ClickhouseStatement.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/ClickhouseStatement.scala @@ -3,9 +3,10 @@ package com.crobox.clickhouse.dsl import com.google.common.escape.Escapers /** - * @author Sjoerd Mulder - * @since 2-1-17 - */ + * @author + * Sjoerd Mulder + * @since 2-1-17 + */ object ClickhouseStatement { val DefaultDatabase: String = "default" private val UnquotedIdentifier = "^[a-zA-Z_][0-9a-zA-Z_]*$" @@ -29,7 +30,7 @@ object ClickhouseStatement { def quoteIdentifier(input: String): String = { require(input != null, "Can't quote null as identifier") require(input != "", "Can't quote empty string as identifier") - if(input.matches(UnquotedIdentifier)) { + if (input.matches(UnquotedIdentifier)) { input } else { "`" + Escaper.escape(input) + "`" @@ -40,10 +41,11 @@ object ClickhouseStatement { trait ClickhouseStatement { /** - * Returns the query string for this statement. - * - * @return String containing the Clickhouse dialect SQL statement - */ + * Returns the query string for this statement. 
+ * + * @return + * String containing the Clickhouse dialect SQL statement + */ def query: String } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/GroupByQuery.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/GroupByQuery.scala index d06e07e5..65e22061 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/GroupByQuery.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/GroupByQuery.scala @@ -7,10 +7,12 @@ object GroupByQuery { sealed trait GroupByMode case object WithRollup extends GroupByMode - case object WithCube extends GroupByMode + case object WithCube extends GroupByMode } -case class GroupByQuery(usingColumns: Seq[Column] = Seq.empty, - mode: Option[GroupByMode] = None, - withTotals: Boolean = false) \ No newline at end of file +case class GroupByQuery( + usingColumns: Seq[Column] = Seq.empty, + mode: Option[GroupByMode] = None, + withTotals: Boolean = false +) diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/JoinQuery.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/JoinQuery.scala index 68b1588b..7adecf8c 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/JoinQuery.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/JoinQuery.scala @@ -25,9 +25,9 @@ object JoinQuery { @deprecated( "Please use AllInnerJoin. Old ANY INNER|RIGHT|FULL JOINs are disabled by default. Their logic would be " + - "changed. Old logic is many-to-one for all kinds of ANY JOINs. It's equal to apply distinct for right table keys. " + - "Default behaviour is reserved for many-to-one LEFT JOIN, one-to-many RIGHT JOIN and one-to-one INNER JOIN. It would " + - "be equal to apply distinct for keys to right, left and both tables respectively", + "changed. Old logic is many-to-one for all kinds of ANY JOINs. It's equal to apply distinct for right table keys. " + + "Default behaviour is reserved for many-to-one LEFT JOIN, one-to-many RIGHT JOIN and one-to-one INNER JOIN. It would " + + "be equal to apply distinct for keys to right, left and both tables respectively", "Clickhouse v20" ) case object AnyInnerJoin extends JoinType @@ -35,9 +35,9 @@ object JoinQuery { @deprecated( "Please use AllRightJoin. Old ANY INNER|RIGHT|FULL JOINs are disabled by default. Their logic would be " + - "changed. Old logic is many-to-one for all kinds of ANY JOINs. It's equal to apply distinct for right table keys. " + - "Default behaviour is reserved for many-to-one LEFT JOIN, one-to-many RIGHT JOIN and one-to-one INNER JOIN. It would " + - "be equal to apply distinct for keys to right, left and both tables respectively", + "changed. Old logic is many-to-one for all kinds of ANY JOINs. It's equal to apply distinct for right table keys. " + + "Default behaviour is reserved for many-to-one LEFT JOIN, one-to-many RIGHT JOIN and one-to-one INNER JOIN. It would " + + "be equal to apply distinct for keys to right, left and both tables respectively", "Clickhouse v20" ) case object AnyRightJoin extends JoinType @@ -50,21 +50,27 @@ object JoinQuery { /** * @param joinType * @param other - * @param on Expressions. Column Operator Column, where operator must be one of the following: =, >, >=, <, <=. - * Default set to '=' - * @param using Columns + * @param on + * Expressions. Column Operator Column, where operator must be one of the following: =, >, >=, <, <=. 
Default set to + * '=' + * @param using + * Columns * @param global */ -case class JoinQuery(joinType: JoinType, - other: FromQuery, - on: Seq[JoinCondition] = Seq.empty, - `using`: Seq[Column] = Seq.empty, - global: Boolean = false) +case class JoinQuery( + joinType: JoinType, + other: FromQuery, + on: Seq[JoinCondition] = Seq.empty, + `using`: Seq[Column] = Seq.empty, + global: Boolean = false +) case class JoinCondition(left: Column, operator: String, right: Column) { - require(JoinCondition.SupportedOperators.contains(operator), - s"Operator[$operator] must be one of: ${JoinCondition.SupportedOperators}") + require( + JoinCondition.SupportedOperators.contains(operator), + s"Operator[$operator] must be one of: ${JoinCondition.SupportedOperators}" + ) } object JoinCondition { diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/OperationalQuery.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/OperationalQuery.scala index 2958fb84..d64d32f0 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/OperationalQuery.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/OperationalQuery.scala @@ -121,8 +121,10 @@ trait OperationalQuery extends Query { OperationalQuery(internalQuery.copy(limit = limit)) def unionAll(otherQuery: OperationalQuery): OperationalQuery = { - require(internalQuery.select.isDefined && otherQuery.internalQuery.select.isDefined, - "Trying to apply UNION ALL on non SELECT queries.") + require( + internalQuery.select.isDefined && otherQuery.internalQuery.select.isDefined, + "Trying to apply UNION ALL on non SELECT queries." + ) require( otherQuery.internalQuery.select.get.columns.size == internalQuery.select.get.columns.size, "SELECT queries needs to have the same number of columns to perform UNION ALL." @@ -137,18 +139,18 @@ trait OperationalQuery extends Query { val selectForGroupCols = selectForGroup.toSeq.flatMap(_.columns) val filteredSelectAll = if (selectForGroupCols.contains(all)) { - //Only keep aliased, we already select all cols + // Only keep aliased, we already select all cols newOrderingColumns.collect { case c: AliasedColumn[_] => c } } else { newOrderingColumns } - val filteredDuplicates = filteredSelectAll.filterNot(column => { + val filteredDuplicates = filteredSelectAll.filterNot(column => selectForGroupCols.exists { case c: Column => column.name == c.name case _ => false } - }) + ) val selectWithOrderColumns = selectForGroupCols ++ filteredDuplicates @@ -156,9 +158,11 @@ trait OperationalQuery extends Query { newSelect } - def join[TargetTable <: Table](joinType: JoinQuery.JoinType, - query: OperationalQuery, - global: Boolean): OperationalQuery = + def join[TargetTable <: Table]( + joinType: JoinQuery.JoinType, + query: OperationalQuery, + global: Boolean + ): OperationalQuery = OperationalQuery(internalQuery.copy(join = Some(JoinQuery(joinType, InnerFromQuery(query), global = global)))) def join[TargetTable <: Table](joinType: JoinQuery.JoinType, table: TargetTable, global: Boolean): OperationalQuery = @@ -256,11 +260,13 @@ trait OperationalQuery extends Query { } /** - * Merge with another OperationalQuery, any conflict on query parts between the 2 joins will be resolved by - * preferring the left querypart over the right one. + * Merge with another OperationalQuery, any conflict on query parts between the 2 joins will be resolved by preferring + * the left querypart over the right one. 
* - * @param other The right part to merge with this OperationalQuery - * @return A merge of this and other OperationalQuery + * @param other + * The right part to merge with this OperationalQuery + * @return + * A merge of this and other OperationalQuery */ def :+>(other: OperationalQuery): OperationalQuery = OperationalQuery(this.internalQuery :+> other.internalQuery) @@ -268,8 +274,10 @@ trait OperationalQuery extends Query { /** * Right associative version of the merge (:+>) operator. * - * @param other The left part to merge with this OperationalQuery - * @return A merge of this and other OperationalQuery + * @param other + * The left part to merge with this OperationalQuery + * @return + * A merge of this and other OperationalQuery */ def <+:(other: OperationalQuery): OperationalQuery = @@ -278,8 +286,10 @@ trait OperationalQuery extends Query { /** * Tries to merge this OperationalQuery with other * - * @param other The Query parts to merge against - * @return A Success on merge without conflict, or Failure of IllegalArgumentException otherwise. + * @param other + * The Query parts to merge against + * @return + * A Success on merge without conflict, or Failure of IllegalArgumentException otherwise. */ def +(other: OperationalQuery): Try[OperationalQuery] = (this.internalQuery + other.internalQuery).map(OperationalQuery.apply) diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/Query.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/Query.scala index 345b911d..bf9753ba 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/Query.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/Query.scala @@ -27,16 +27,18 @@ case object ASC extends OrderingDirection case object DESC extends OrderingDirection -sealed case class InternalQuery(select: Option[SelectQuery] = None, - from: Option[FromQuery] = None, - prewhere: Option[TableColumn[Boolean]] = None, - where: Option[TableColumn[Boolean]] = None, - groupBy: Option[GroupByQuery] = None, - having: Option[TableColumn[Boolean]] = None, - join: Option[JoinQuery] = None, - orderBy: Seq[(Column, OrderingDirection)] = Seq.empty, - limit: Option[Limit] = None, - unionAll: Seq[OperationalQuery] = Seq.empty) { +sealed case class InternalQuery( + select: Option[SelectQuery] = None, + from: Option[FromQuery] = None, + prewhere: Option[TableColumn[Boolean]] = None, + where: Option[TableColumn[Boolean]] = None, + groupBy: Option[GroupByQuery] = None, + having: Option[TableColumn[Boolean]] = None, + join: Option[JoinQuery] = None, + orderBy: Seq[(Column, OrderingDirection)] = Seq.empty, + limit: Option[Limit] = None, + unionAll: Seq[OperationalQuery] = Seq.empty +) { def isValid: Boolean = { val validGroupBy = groupBy.isEmpty && having.isEmpty || groupBy.nonEmpty @@ -47,11 +49,13 @@ sealed case class InternalQuery(select: Option[SelectQuery] = None, def isPartial: Boolean = !isValid /** - * Merge with another InternalQuery, any conflict on query parts between the 2 joins will be resolved by - * preferring the left querypart over the right one. + * Merge with another InternalQuery, any conflict on query parts between the 2 joins will be resolved by preferring + * the left querypart over the right one. 
* - * @param other The right part to merge with this InternalQuery - * @return A merge of this and other InternalQuery + * @param other + * The right part to merge with this InternalQuery + * @return + * A merge of this and other InternalQuery */ def :+>(other: InternalQuery): InternalQuery = InternalQuery( @@ -69,19 +73,23 @@ sealed case class InternalQuery(select: Option[SelectQuery] = None, /** * Right associative version of the merge (:+>) operator. * - * @param other The left part to merge with this InternalQuery - * @return A merge of this and other OperationalQuery + * @param other + * The left part to merge with this InternalQuery + * @return + * A merge of this and other OperationalQuery */ def <+:(other: InternalQuery): InternalQuery = :+>(other) /** * Tries to merge this InternalQuery with other * - * @param other The Query parts to merge against - * @return A Success on merge without conflict, or Failure of IllegalArgumentException otherwise. + * @param other + * The Query parts to merge against + * @return + * A Success on merge without conflict, or Failure of IllegalArgumentException otherwise. */ def +(other: InternalQuery): Try[InternalQuery] = Try { - (0 until productArity).foreach(id => { + (0 until productArity).foreach(id => require( (productElement(id), other.productElement(id)) match { case (ts: Option[_], tt: Option[_]) => @@ -93,7 +101,7 @@ sealed case class InternalQuery(select: Option[SelectQuery] = None, }, s"Conflicting parts ${productElement(id)} and ${other.productElement(id)}" ) - }) + ) :+>(other) } } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/QueryFactory.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/QueryFactory.scala index f0a30c54..09e6e153 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/QueryFactory.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/QueryFactory.scala @@ -1,8 +1,8 @@ package com.crobox.clickhouse.dsl /** - * QueryFactory exposes all methods of OperationalQuery from a empty starting point (factoring new queries) - */ + * QueryFactory exposes all methods of OperationalQuery from a empty starting point (factoring new queries) + */ trait QueryFactory extends OperationalQuery { override val internalQuery: InternalQuery = InternalQuery() } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/TableColumn.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/TableColumn.scala index 5d3a4a11..9024a858 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/TableColumn.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/TableColumn.scala @@ -21,11 +21,12 @@ abstract class TableColumn[+V](val name: String) extends Column { case object EmptyColumn extends TableColumn("NULL") -case class NativeColumn[V](override val name: String, - clickhouseType: ColumnType = ColumnType.String, - defaultValue: DefaultValue = DefaultValue.NoDefault, - ttl: Option[TTL] = None) - extends TableColumn[V](name) { +case class NativeColumn[V]( + override val name: String, + clickhouseType: ColumnType = ColumnType.String, + defaultValue: DefaultValue = DefaultValue.NoDefault, + ttl: Option[TTL] = None +) extends TableColumn[V](name) { def query: String = s"$quoted $clickhouseType$defaultValue${TTL.ttl(ttl).map(s => " " + s).getOrElse("")}" } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/AggregationFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/AggregationFunctions.scala index 297409df..fbb73fab 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/AggregationFunctions.scala 
+++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/AggregationFunctions.scala @@ -11,11 +11,11 @@ trait AggregationFunctions { with Leveled with AggregationFunctionsCombiners => - //TODO: Magnetize? + // TODO: Magnetize? // Aggregate functions are a whole different beast, they are intercompatible and type passing in a different way then // what most other functions work like - //https://clickhouse.yandex/docs/en/agg_functions/reference + // https://clickhouse.yandex/docs/en/agg_functions/reference abstract class AggregateFunction[+V](targetColumn: Column) extends ExpressionColumn[V](targetColumn) @@ -54,13 +54,12 @@ trait AggregationFunctions { def lastValue[V](tableColumn: TableColumn[V]): LastValue[V] = LastValue(tableColumn) /** - * This function will push back the timestamp represented by tableColumn to the start of this interval, - * this happens deterministically. + * This function will push back the timestamp represented by tableColumn to the start of this interval, this happens + * deterministically. * * Meaning that as long as the duration is the same, your groups will be in the same from/to timestamps * * This is useful for aggregating results by periods of time (group by month, 2 months, days, etc.) - * */ def timeSeries(tableColumn: TableColumn[Long], interval: MultiInterval): TimeSeries = TimeSeries(tableColumn, interval) @@ -74,9 +73,10 @@ trait AggregationFunctions { trait AggregationFunctionsCombiners { self: Magnets with AggregationFunctions => - case class CombinedAggregatedFunction[T <: TableColumn[_], Res](combinator: Combinator[T, Res], - target: AggregateFunction[_]) - extends AggregateFunction[Res](EmptyColumn) + case class CombinedAggregatedFunction[T <: TableColumn[_], Res]( + combinator: Combinator[T, Res], + target: AggregateFunction[_] + ) extends AggregateFunction[Res](EmptyColumn) sealed trait StateResult[V] @@ -107,7 +107,6 @@ trait AggregationFunctionsCombiners { self: Magnets with AggregationFunctions => * |[x3, y3, z3] * * if you run sumForEach(array_col) you will get an array result with the following entries: [sum(x1,x3,x3), sum(y1,y2,y3), sum(z1, z2, z3), sum(u1)] - * */ def forEach[V, T <: TableColumn[Seq[V]], Res]( column: T @@ -226,16 +225,18 @@ trait Leveled { self: Magnets with AggregationFunctions => } /*Works for numbers, dates, and dates with times. Returns: for numbers – Float64; for dates – a date; for dates with times – a date with time.Works for numbers, dates, and dates with times. 
Returns: for numbers – Float64; for dates – a date; for dates with times – a date with time.*/ - case class Quantile[T](tableColumn: TableColumn[T], - level: Float = 0.5F, - modifier: LevelModifier = LevelModifier.Simple) - extends LeveledAggregatedFunction[T](tableColumn) { + case class Quantile[T]( + tableColumn: TableColumn[T], + level: Float = 0.5f, + modifier: LevelModifier = LevelModifier.Simple + ) extends LeveledAggregatedFunction[T](tableColumn) { require(level >= 0 && level <= 1) } - case class Quantiles[T](tableColumn: TableColumn[T], - levels: Seq[Float], - modifier: LevelModifier = LevelModifier.Simple) - extends LeveledAggregatedFunction[Seq[T]](tableColumn) { + case class Quantiles[T]( + tableColumn: TableColumn[T], + levels: Seq[Float], + modifier: LevelModifier = LevelModifier.Simple + ) extends LeveledAggregatedFunction[Seq[T]](tableColumn) { levels.foreach(level => require(level >= 0 && level <= 1)) } case class Median[T](tableColumn: TableColumn[T], level: Float, modifier: LevelModifier = LevelModifier.Simple) @@ -243,63 +244,65 @@ trait Leveled { self: Magnets with AggregationFunctions => require(level > 0 && level < 1) } - def median[V](target: TableColumn[V], level: Float = 0.5F): Median[V] = Median(target, level = level) + def median[V](target: TableColumn[V], level: Float = 0.5f): Median[V] = Median(target, level = level) - def quantile[V](target: TableColumn[V], level: Float = 0.5F): Quantile[V] = Quantile(target, level = level) + def quantile[V](target: TableColumn[V], level: Float = 0.5f): Quantile[V] = Quantile(target, level = level) def quantiles[V](target: TableColumn[V], levels: Float*): Quantiles[V] = Quantiles(target, levels) - def medianExact[V](target: TableColumn[V], level: Float = 0.5F): Median[V] = + def medianExact[V](target: TableColumn[V], level: Float = 0.5f): Median[V] = Median(target, level, LevelModifier.Exact) - def quantileExact[V](target: TableColumn[V], level: Float = 0.5F): Quantile[V] = + def quantileExact[V](target: TableColumn[V], level: Float = 0.5f): Quantile[V] = Quantile(target, level, LevelModifier.Exact) def quantilesExact[V](target: TableColumn[V], levels: Float*): Quantiles[V] = Quantiles(target, levels, LevelModifier.Exact) - def medianExactWeighted[V](target: TableColumn[V], weight: TableColumn[Int], level: Float = 0.5F): Median[V] = + def medianExactWeighted[V](target: TableColumn[V], weight: TableColumn[Int], level: Float = 0.5f): Median[V] = Median(target, level, LevelModifier.ExactWeighted(weight)) - def quantileExactWeighted[V](target: TableColumn[V], weight: TableColumn[Int], level: Float = 0.5F): Quantile[V] = + def quantileExactWeighted[V](target: TableColumn[V], weight: TableColumn[Int], level: Float = 0.5f): Quantile[V] = Quantile(target, level, LevelModifier.ExactWeighted(weight)) def quantilesExactWeighted[V](target: TableColumn[V], weight: TableColumn[Int], levels: Float*): Quantiles[V] = Quantiles(target, levels, LevelModifier.ExactWeighted(weight)) - def medianTDigest[V](target: TableColumn[V], level: Float = 0.5F): Median[V] = + def medianTDigest[V](target: TableColumn[V], level: Float = 0.5f): Median[V] = Median(target, level, LevelModifier.TDigest) - def quantileTDigest[V](target: TableColumn[V], level: Float = 0.5F): Quantile[V] = + def quantileTDigest[V](target: TableColumn[V], level: Float = 0.5f): Quantile[V] = Quantile(target, level, LevelModifier.TDigest) def quantilesTDigest[V](target: TableColumn[V], levels: Float*): Quantiles[V] = Quantiles(target, levels, LevelModifier.TDigest) - def 
medianTiming[V](target: TableColumn[V], level: Float = 0.5F): Median[V] = + def medianTiming[V](target: TableColumn[V], level: Float = 0.5f): Median[V] = Median(target, level, LevelModifier.Timing) - def quantileTiming[V](target: TableColumn[V], level: Float = 0.5F): Quantile[V] = + def quantileTiming[V](target: TableColumn[V], level: Float = 0.5f): Quantile[V] = Quantile(target, level, LevelModifier.Timing) def quantilesTiming[V](target: TableColumn[V], levels: Float*): Quantiles[V] = Quantiles(target, levels, LevelModifier.Timing) - def medianTimingWeighted[V](target: TableColumn[V], weight: TableColumn[Int], level: Float = 0.5F): Median[V] = + def medianTimingWeighted[V](target: TableColumn[V], weight: TableColumn[Int], level: Float = 0.5f): Median[V] = Median(target, level, LevelModifier.TimingWeighted(weight)) - def quantileTimingWeighted[V](target: TableColumn[V], weight: TableColumn[Int], level: Float = 0.5F): Quantile[V] = + def quantileTimingWeighted[V](target: TableColumn[V], weight: TableColumn[Int], level: Float = 0.5f): Quantile[V] = Quantile(target, level, LevelModifier.TimingWeighted(weight)) def quantilesTimingWeighted[V](target: TableColumn[V], weight: TableColumn[Int], levels: Float*): Quantiles[V] = Quantiles(target, levels, LevelModifier.TimingWeighted(weight)) - def medianDeterministic[V, T](target: TableColumn[V], determinator: TableColumn[T], level: Float = 0.5F): Median[V] = + def medianDeterministic[V, T](target: TableColumn[V], determinator: TableColumn[T], level: Float = 0.5f): Median[V] = Median(target, level, LevelModifier.Deterministic(determinator)) - def quantileDeterministic[V, T](target: TableColumn[V], - determinator: TableColumn[T], - level: Float = 0.5F): Quantile[V] = + def quantileDeterministic[V, T]( + target: TableColumn[V], + determinator: TableColumn[T], + level: Float = 0.5f + ): Quantile[V] = Quantile(target, level, LevelModifier.Deterministic(determinator)) def quantilesDeterministic[V, T](target: TableColumn[V], determinator: TableColumn[T], levels: Float*): Quantiles[V] = diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ArithmeticFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ArithmeticFunctions.scala index ec047c4d..0592b2ac 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ArithmeticFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ArithmeticFunctions.scala @@ -48,7 +48,7 @@ trait ArithmeticFunctions { self: Magnets => case class Negate[T](t: NumericCol[T]) extends ArithmeticFunctionCol[T](t) case class Abs[T](t: NumericCol[T]) extends ArithmeticFunctionCol[T](t) - //trait ArithmeticFunctionsDsl { + // trait ArithmeticFunctionsDsl { sealed abstract class AritRetType[L, R, O] implicit object IntIntBinding extends AritRetType[Int, Int, Int] @@ -118,8 +118,8 @@ trait ArithmeticFunctions { self: Magnets => def intDiv[L, R, O](left: NumericCol[L], right: NumericCol[R])(implicit ev: AritRetType[L, R, O]): IntDiv[O] = IntDiv[O](left, right) - def intDivOrZero[L, R, O](left: NumericCol[L], right: NumericCol[R])( - implicit ev: AritRetType[L, R, O] + def intDivOrZero[L, R, O](left: NumericCol[L], right: NumericCol[R])(implicit + ev: AritRetType[L, R, O] ): IntDivOrZero[O] = IntDivOrZero[O](left, right) def lcm[L, R, O](left: NumericCol[L], right: NumericCol[R])(implicit ev: AritRetType[L, R, O]): Lcm[O] = diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ArrayFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ArrayFunctions.scala index 
d2e1456d..a0b78bf9 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ArrayFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ArrayFunctions.scala @@ -25,13 +25,13 @@ trait ArrayFunctions { this: Magnets => case class EmptyArrayString() extends ArrayFunctionConst[String] case class Range(n: NumericCol[_]) extends ArrayFunctionConst[Long] - case class EmptyArrayToSingle[V](col: ArrayColMagnet[V]) extends ArrayFunctionOp[V] - case class Array[V](columns: ConstOrColMagnet[V]*) extends ArrayFunctionOp[Iterable[V]] - case class ArrayConcat[V](col1: ArrayColMagnet[V], columns: ArrayColMagnet[V]*) extends ArrayFunctionOp[Iterable[V]] - case class ArrayElement[V](col: ArrayColMagnet[_ <: Iterable[V]], n: NumericCol[_]) extends ArrayFunctionOp[V] - case class Has[V](col: ArrayColMagnet[V], elm: Magnet[V]) extends ArrayFunctionOp[Boolean] - case class HasAll[V](col: ArrayColMagnet[V], elm: Magnet[V]) extends ArrayFunctionOp[Boolean] - case class HasAny[V](col: ArrayColMagnet[V], elm: Magnet[V]) extends ArrayFunctionOp[Boolean] + case class EmptyArrayToSingle[V](col: ArrayColMagnet[V]) extends ArrayFunctionOp[V] + case class Array[V](columns: ConstOrColMagnet[V]*) extends ArrayFunctionOp[Iterable[V]] + case class ArrayConcat[V](col1: ArrayColMagnet[V], columns: ArrayColMagnet[V]*) extends ArrayFunctionOp[Iterable[V]] + case class ArrayElement[V](col: ArrayColMagnet[_ <: Iterable[V]], n: NumericCol[_]) extends ArrayFunctionOp[V] + case class Has[V](col: ArrayColMagnet[V], elm: Magnet[V]) extends ArrayFunctionOp[Boolean] + case class HasAll[V](col: ArrayColMagnet[V], elm: Magnet[V]) extends ArrayFunctionOp[Boolean] + case class HasAny[V](col: ArrayColMagnet[V], elm: Magnet[V]) extends ArrayFunctionOp[Boolean] case class IndexOf[V](col: ArrayColMagnet[_ <: Iterable[V]], elm: ConstOrColMagnet[V]) extends ArrayFunctionOp[Long] case class CountEqual[V](col: ArrayColMagnet[_ <: Iterable[V]], elm: ConstOrColMagnet[V]) extends ArrayFunctionOp[Long] @@ -56,10 +56,11 @@ trait ArrayFunctions { this: Magnets => case class ArrayDistinct[V](col: ArrayColMagnet[_ <: Iterable[V]]) extends ArrayFunctionOp[Iterable[V]] case class ArrayIntersect[V](col: ArrayColMagnet[_ <: Iterable[V]], columns: ArrayColMagnet[_ <: Iterable[V]]*) extends ArrayFunctionOp[Iterable[V]] - case class ArrayReduce[V](function: String, - col: ArrayColMagnet[_ <: Iterable[V]], - columns: ArrayColMagnet[_ <: Iterable[V]]*) - extends ArrayFunctionOp[V] + case class ArrayReduce[V]( + function: String, + col: ArrayColMagnet[_ <: Iterable[V]], + columns: ArrayColMagnet[_ <: Iterable[V]]* + ) extends ArrayFunctionOp[V] case class ArrayReverse[V](col: ArrayColMagnet[_ <: Iterable[V]]) extends ArrayFunctionOp[Iterable[V]] // new 22-07-15 case class ArrayEmpty(col: ArrayColMagnet[_]) extends ArrayFunctionOp[Boolean] @@ -96,8 +97,10 @@ trait ArrayFunctions { this: Magnets => CountEqual[V](col, elm) def arrayEnumerate[V](col: ArrayColMagnet[V]): ArrayEnumerate[V] = ArrayEnumerate(col) - def arrayEnumerateUniq[V](col1: ArrayColMagnet[_ <: Iterable[V]], - coln: ArrayColMagnet[_ <: Iterable[V]]*): ArrayEnumerateUniq[V] = + def arrayEnumerateUniq[V]( + col1: ArrayColMagnet[_ <: Iterable[V]], + coln: ArrayColMagnet[_ <: Iterable[V]]* + ): ArrayEnumerateUniq[V] = ArrayEnumerateUniq[V](col1, coln: _*) def arrayPopBack[V](col: ArrayColMagnet[_ <: Iterable[V]]): ArrayPopBack[V] = ArrayPopBack[V](col) def arrayPopFront[V](col: ArrayColMagnet[_ <: Iterable[V]]): ArrayPopFront[V] = ArrayPopFront[V](col) @@ -108,14 +111,18 @@ trait 
ArrayFunctions { this: Magnets => def arrayPushFront[V](col: ArrayColMagnet[_ <: Iterable[V]], elm: ConstOrColMagnet[V]): ArrayPushFront[V] = ArrayPushFront[V](col, elm) - def arrayResize[V](col: ArrayColMagnet[_ <: Iterable[V]], - size: NumericCol[_], - extender: ConstOrColMagnet[V]): ArrayResize[V] = + def arrayResize[V]( + col: ArrayColMagnet[_ <: Iterable[V]], + size: NumericCol[_], + extender: ConstOrColMagnet[V] + ): ArrayResize[V] = ArrayResize[V](col, size, extender) - def arraySlice[V](col: ArrayColMagnet[_ <: Iterable[V]], - offset: NumericCol[_], - length: NumericCol[_] = 0): ArraySlice[V] = + def arraySlice[V]( + col: ArrayColMagnet[_ <: Iterable[V]], + offset: NumericCol[_], + length: NumericCol[_] = 0 + ): ArraySlice[V] = ArraySlice[V](col, offset, length) def arrayUniq[V](col1: ArrayColMagnet[_ <: Iterable[V]], coln: ArrayColMagnet[_ <: Iterable[V]]*): ArrayUniq[V] = @@ -126,30 +133,36 @@ trait ArrayFunctions { this: Magnets => def arrayDifference[V](col: ArrayColMagnet[_ <: Iterable[V]]): ArrayDifference[V] = ArrayDifference[V](col) def arrayDistinct[V](col: ArrayColMagnet[_ <: Iterable[V]]): ArrayDistinct[V] = ArrayDistinct[V](col) - def arrayIntersect[V](col1: ArrayColMagnet[_ <: Iterable[V]], - col2: ArrayColMagnet[_ <: Iterable[V]], - columns: ArrayColMagnet[_ <: Iterable[V]]*): ArrayIntersect[V] = + def arrayIntersect[V]( + col1: ArrayColMagnet[_ <: Iterable[V]], + col2: ArrayColMagnet[_ <: Iterable[V]], + columns: ArrayColMagnet[_ <: Iterable[V]]* + ): ArrayIntersect[V] = ArrayIntersect[V](col1, Seq(col2) ++ columns: _*) - def arrayReduce[V](function: String, - col: ArrayColMagnet[_ <: Iterable[V]], - columns: ArrayColMagnet[_ <: Iterable[V]]*): ArrayReduce[V] = + def arrayReduce[V]( + function: String, + col: ArrayColMagnet[_ <: Iterable[V]], + columns: ArrayColMagnet[_ <: Iterable[V]]* + ): ArrayReduce[V] = ArrayReduce[V](function, col, columns: _*) def arrayReverse[V](col: ArrayColMagnet[_ <: Iterable[V]]): ArrayReverse[V] = ArrayReverse[V](col) /** - * Special function that checks if given arrays 'share' at least one element. - * Basically it's a wrapper around arrayIntersect (by using notEmpty) - + * Special function that checks if given arrays 'share' at least one element. 
Basically it's a wrapper around + * arrayIntersect (by using notEmpty) + * * @param col1 * @param col2 * @param columns * @tparam V * @return */ - def arrayMatch[V](col1: ArrayColMagnet[_ <: Iterable[V]], - col2: ArrayColMagnet[_ <: Iterable[V]], - columns: ArrayColMagnet[_ <: Iterable[V]]*): ExpressionColumn[Boolean] = + def arrayMatch[V]( + col1: ArrayColMagnet[_ <: Iterable[V]], + col2: ArrayColMagnet[_ <: Iterable[V]], + columns: ArrayColMagnet[_ <: Iterable[V]]* + ): ExpressionColumn[Boolean] = notEmpty(arrayIntersect(col1, col2, columns: _*)) def arrayEmpty(col: ArrayColMagnet[_]): ArrayEmpty = ArrayEmpty(col) diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/DateTimeFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/DateTimeFunctions.scala index 92d02e25..e5e7bd2b 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/DateTimeFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/DateTimeFunctions.scala @@ -10,16 +10,16 @@ trait DateTimeFunctions { self: Magnets => extends ExpressionColumn(ddt.column) with DateTimeFunction - abstract class DateTimeConst[V]() extends ExpressionColumn[V](EmptyColumn) with DateTimeFunction - case class Year(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) - case class YYYYMM(d: DateOrDateTime[_]) extends DateTimeFunctionCol[String](d) - case class Month(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) - case class DayOfMonth(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) - case class DayOfWeek(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) - case class Hour(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) - case class Minute(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) - case class Second(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) - case class Monday[V](d: DateOrDateTime[_]) extends DateTimeFunctionCol[V](d) + abstract class DateTimeConst[V]() extends ExpressionColumn[V](EmptyColumn) with DateTimeFunction + case class Year(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) + case class YYYYMM(d: DateOrDateTime[_]) extends DateTimeFunctionCol[String](d) + case class Month(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) + case class DayOfMonth(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) + case class DayOfWeek(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) + case class Hour(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) + case class Minute(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) + case class Second(d: DateOrDateTime[_]) extends DateTimeFunctionCol[Int](d) + case class Monday[V](d: DateOrDateTime[_]) extends DateTimeFunctionCol[V](d) case class AddSeconds(d: DateOrDateTime[_], seconds: NumericCol[_]) extends DateTimeFunctionCol[DateTime](d) case class AddMinutes(d: DateOrDateTime[_], minutes: NumericCol[_]) extends DateTimeFunctionCol[DateTime](d) case class AddHours(d: DateOrDateTime[_], hours: NumericCol[_]) extends DateTimeFunctionCol[DateTime](d) diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/DictionaryFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/DictionaryFunctions.scala index b9bf0674..1239f7ba 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/DictionaryFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/DictionaryFunctions.scala @@ -7,75 +7,221 @@ import org.joda.time.{DateTime, LocalDate} trait DictionaryFunctions { self: Magnets => - sealed abstract class 
DictionaryGetFuncColumn[V](val dictName: StringColMagnet[_], val attrName: StringColMagnet[_], val id: ConstOrColMagnet[_], val default: Option[Magnet[V]] = None) extends - DictionaryFuncColumn[V] + sealed abstract class DictionaryGetFuncColumn[V]( + val dictName: StringColMagnet[_], + val attrName: StringColMagnet[_], + val id: ConstOrColMagnet[_], + val default: Option[Magnet[V]] = None + ) extends DictionaryFuncColumn[V] - sealed abstract class DictionaryFuncColumn[V] extends - ExpressionColumn[V](EmptyColumn) + sealed abstract class DictionaryFuncColumn[V] extends ExpressionColumn[V](EmptyColumn) - case class DictGetUInt8(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[Long]] = None) - extends DictionaryGetFuncColumn[Long](_dictName,_attrName,_id,_default) - case class DictGetUInt16(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[Long]] = None) - extends DictionaryGetFuncColumn[Long](_dictName,_attrName,_id,_default) - case class DictGetUInt32(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[Long]] = None) - extends DictionaryGetFuncColumn[Long](_dictName,_attrName,_id,_default) - case class DictGetUInt64(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[Long]] = None) - extends DictionaryGetFuncColumn[Long](_dictName,_attrName,_id,_default) - case class DictGetInt8(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[Long]] = None) - extends DictionaryGetFuncColumn[Long](_dictName,_attrName,_id,_default) - case class DictGetInt16(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[Long]] = None) - extends DictionaryGetFuncColumn[Long](_dictName,_attrName,_id,_default) - case class DictGetInt32(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[Long]] = None) - extends DictionaryGetFuncColumn[Long](_dictName,_attrName,_id,_default) - case class DictGetInt64(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[Long]] = None) - extends DictionaryGetFuncColumn[Long](_dictName,_attrName,_id,_default) - case class DictGetFloat32(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[Float]] = None) - extends DictionaryGetFuncColumn[Float](_dictName,_attrName,_id,_default) - case class DictGetFloat64(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[Float]] = None) - extends DictionaryGetFuncColumn[Float](_dictName,_attrName,_id,_default) - case class DictGetDate(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[LocalDate]] = None) - extends DictionaryGetFuncColumn[LocalDate](_dictName,_attrName,_id,_default) - case class DictGetDateTime(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[DateTime]] = None) - extends DictionaryGetFuncColumn[DateTime](_dictName,_attrName,_id,_default) - case class DictGetUUID(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[UUID]] = None) - extends 
DictionaryGetFuncColumn[UUID](_dictName,_attrName,_id,_default) - case class DictGetString(_dictName: StringColMagnet[_], _attrName: StringColMagnet[_], _id: ConstOrColMagnet[_], _default: Option[Magnet[String]] = None) - extends DictionaryGetFuncColumn[String](_dictName,_attrName,_id,_default) + case class DictGetUInt8( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[Long]] = None + ) extends DictionaryGetFuncColumn[Long](_dictName, _attrName, _id, _default) + case class DictGetUInt16( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[Long]] = None + ) extends DictionaryGetFuncColumn[Long](_dictName, _attrName, _id, _default) + case class DictGetUInt32( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[Long]] = None + ) extends DictionaryGetFuncColumn[Long](_dictName, _attrName, _id, _default) + case class DictGetUInt64( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[Long]] = None + ) extends DictionaryGetFuncColumn[Long](_dictName, _attrName, _id, _default) + case class DictGetInt8( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[Long]] = None + ) extends DictionaryGetFuncColumn[Long](_dictName, _attrName, _id, _default) + case class DictGetInt16( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[Long]] = None + ) extends DictionaryGetFuncColumn[Long](_dictName, _attrName, _id, _default) + case class DictGetInt32( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[Long]] = None + ) extends DictionaryGetFuncColumn[Long](_dictName, _attrName, _id, _default) + case class DictGetInt64( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[Long]] = None + ) extends DictionaryGetFuncColumn[Long](_dictName, _attrName, _id, _default) + case class DictGetFloat32( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[Float]] = None + ) extends DictionaryGetFuncColumn[Float](_dictName, _attrName, _id, _default) + case class DictGetFloat64( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[Float]] = None + ) extends DictionaryGetFuncColumn[Float](_dictName, _attrName, _id, _default) + case class DictGetDate( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[LocalDate]] = None + ) extends DictionaryGetFuncColumn[LocalDate](_dictName, _attrName, _id, _default) + case class DictGetDateTime( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[DateTime]] = None + ) extends DictionaryGetFuncColumn[DateTime](_dictName, _attrName, _id, _default) + case class DictGetUUID( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: ConstOrColMagnet[_], + _default: Option[Magnet[UUID]] = None + ) extends DictionaryGetFuncColumn[UUID](_dictName, _attrName, _id, _default) + case class DictGetString( + _dictName: StringColMagnet[_], + _attrName: StringColMagnet[_], + _id: 
ConstOrColMagnet[_], + _default: Option[Magnet[String]] = None + ) extends DictionaryGetFuncColumn[String](_dictName, _attrName, _id, _default) - case class DictIsIn(dictName: StringColMagnet[_], childId: ConstOrColMagnet[_], ancestorId: ConstOrColMagnet[_]) extends DictionaryFuncColumn[Boolean] - case class DictGetHierarchy(dictName: StringColMagnet[_], id: ConstOrColMagnet[_]) extends DictionaryFuncColumn[String] + case class DictIsIn(dictName: StringColMagnet[_], childId: ConstOrColMagnet[_], ancestorId: ConstOrColMagnet[_]) + extends DictionaryFuncColumn[Boolean] + case class DictGetHierarchy(dictName: StringColMagnet[_], id: ConstOrColMagnet[_]) + extends DictionaryFuncColumn[String] case class DictHas(dictName: StringColMagnet[_], id: ConstOrColMagnet[_]) extends DictionaryFuncColumn[Boolean] - def dictGetUInt8 (dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetUInt8(dictName, attrName, id) - def dictGetUInt16(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetUInt16(dictName, attrName, id) - def dictGetUInt32(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetUInt32(dictName, attrName, id) - def dictGetUInt64(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetUInt64(dictName, attrName, id) - def dictGetInt8 (dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetInt8(dictName, attrName, id) - def dictGetInt16 (dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetInt16(dictName, attrName, id) - def dictGetInt32 (dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetInt32(dictName, attrName, id) - def dictGetInt64 (dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetInt64(dictName, attrName, id) - def dictGetFloat32(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetFloat32(dictName, attrName, id) - def dictGetFloat64(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetFloat64(dictName, attrName, id) - def dictGetDate (dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetDate(dictName, attrName, id) - def dictGetDateTime(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetDateTime(dictName, attrName, id) - def dictGetUUID (dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetUUID(dictName, attrName, id) - def dictGetString(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetString(dictName, attrName, id) - def dictIsIn (dictName: StringColMagnet[_], childId: ConstOrColMagnet[_], id: ConstOrColMagnet[_]) = DictIsIn(dictName, childId, id) + def dictGetUInt8(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetUInt8(dictName, attrName, id) + def dictGetUInt16(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetUInt16(dictName, attrName, id) + def dictGetUInt32(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetUInt32(dictName, attrName, id) + def dictGetUInt64(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetUInt64(dictName, attrName, 
id) + def dictGetInt8(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetInt8(dictName, attrName, id) + def dictGetInt16(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetInt16(dictName, attrName, id) + def dictGetInt32(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetInt32(dictName, attrName, id) + def dictGetInt64(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetInt64(dictName, attrName, id) + def dictGetFloat32(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetFloat32(dictName, attrName, id) + def dictGetFloat64(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetFloat64(dictName, attrName, id) + def dictGetDate(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetDate(dictName, attrName, id) + def dictGetDateTime(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetDateTime(dictName, attrName, id) + def dictGetUUID(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetUUID(dictName, attrName, id) + def dictGetString(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_]) = + DictGetString(dictName, attrName, id) + def dictIsIn(dictName: StringColMagnet[_], childId: ConstOrColMagnet[_], id: ConstOrColMagnet[_]) = + DictIsIn(dictName, childId, id) def dictGetHierarchy(dictName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictGetHierarchy(dictName, id) - def dictHas (dictName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictHas(dictName, id) + def dictHas(dictName: StringColMagnet[_], id: ConstOrColMagnet[_]) = DictHas(dictName, id) - def dictGetUInt8OrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[Long]) = DictGetUInt8(dictName, attrName, id, Some(default)) - def dictGetUInt16OrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[Long]) = DictGetUInt16(dictName, attrName, id, Some(default)) - def dictGetUInt32OrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[Long]) = DictGetUInt32(dictName, attrName, id, Some(default)) - def dictGetUInt64OrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[Long]) = DictGetUInt64(dictName, attrName, id, Some(default)) - def dictGetInt8OrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[Long]) = DictGetInt8(dictName, attrName, id, Some(default)) - def dictGetInt16OrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[Long]) = DictGetInt16(dictName, attrName, id, Some(default)) - def dictGetInt32OrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[Long]) = DictGetInt32(dictName, attrName, id, Some(default)) - def dictGetInt64OrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[Long]) = DictGetInt64(dictName, attrName, id, Some(default)) - def dictGetFloat32OrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: 
Magnet[Float]) = DictGetFloat32(dictName, attrName, id, Some(default)) - def dictGetFloat64OrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[Float]) = DictGetFloat64(dictName, attrName, id, Some(default)) - def dictGetDateOrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[LocalDate]) = DictGetDate(dictName, attrName, id, Some(default)) - def dictGetDateTimeOrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[DateTime]) = DictGetDateTime(dictName, attrName, id, Some(default)) - def dictGetUUIDOrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[UUID]) = DictGetUUID(dictName, attrName, id, Some(default)) - def dictGetStringOrDefault(dictName: StringColMagnet[_], attrName: StringColMagnet[_], id: ConstOrColMagnet[_], default: Magnet[String]) = DictGetString(dictName, attrName, id, Some(default)) + def dictGetUInt8OrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[Long] + ) = DictGetUInt8(dictName, attrName, id, Some(default)) + def dictGetUInt16OrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[Long] + ) = DictGetUInt16(dictName, attrName, id, Some(default)) + def dictGetUInt32OrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[Long] + ) = DictGetUInt32(dictName, attrName, id, Some(default)) + def dictGetUInt64OrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[Long] + ) = DictGetUInt64(dictName, attrName, id, Some(default)) + def dictGetInt8OrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[Long] + ) = DictGetInt8(dictName, attrName, id, Some(default)) + def dictGetInt16OrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[Long] + ) = DictGetInt16(dictName, attrName, id, Some(default)) + def dictGetInt32OrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[Long] + ) = DictGetInt32(dictName, attrName, id, Some(default)) + def dictGetInt64OrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[Long] + ) = DictGetInt64(dictName, attrName, id, Some(default)) + def dictGetFloat32OrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[Float] + ) = DictGetFloat32(dictName, attrName, id, Some(default)) + def dictGetFloat64OrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[Float] + ) = DictGetFloat64(dictName, attrName, id, Some(default)) + def dictGetDateOrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[LocalDate] + ) = DictGetDate(dictName, attrName, id, Some(default)) + def dictGetDateTimeOrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[DateTime] + ) = DictGetDateTime(dictName, attrName, id, Some(default)) + def dictGetUUIDOrDefault( + dictName: StringColMagnet[_], + 
attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[UUID] + ) = DictGetUUID(dictName, attrName, id, Some(default)) + def dictGetStringOrDefault( + dictName: StringColMagnet[_], + attrName: StringColMagnet[_], + id: ConstOrColMagnet[_], + default: Magnet[String] + ) = DictGetString(dictName, attrName, id, Some(default)) } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/EncodingFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/EncodingFunctions.scala index 85fc0def..9a7b2d73 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/EncodingFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/EncodingFunctions.scala @@ -5,17 +5,17 @@ import com.crobox.clickhouse.dsl.{Column, ExpressionColumn} trait EncodingFunctions { self: Magnets => abstract class EncodingFunction[O](val column: Column) extends ExpressionColumn[O](column) - case class Hex(col: HexCompatible[_]) extends EncodingFunction[String](col.column) + case class Hex(col: HexCompatible[_]) extends EncodingFunction[String](col.column) case class Unhex(col: StringColMagnet[_]) extends EncodingFunction[String](col.column) case class UUIDStringToNum(col: StringColMagnet[_]) extends EncodingFunction[Byte](col.column) case class UUIDNumToString(col: StringColMagnet[_]) extends EncodingFunction[Byte](col.column) - case class BitmaskToList(col: NumericCol[_]) extends EncodingFunction[String](col.column) - case class BitmaskToArray(col: NumericCol[_]) extends EncodingFunction[Iterable[Long]](col.column) + case class BitmaskToList(col: NumericCol[_]) extends EncodingFunction[String](col.column) + case class BitmaskToArray(col: NumericCol[_]) extends EncodingFunction[Iterable[Long]](col.column) def hex(col: HexCompatible[_]) = Hex(col) - def unhex(col: StringColMagnet[_]) = Unhex(col) - def uUIDStringToNum(col: StringColMagnet[_]) = UUIDStringToNum(col) - def uUIDNumToString(col: StringColMagnet[_]) = UUIDNumToString(col) + def unhex(col: StringColMagnet[_]) = Unhex(col) + def uUIDStringToNum(col: StringColMagnet[_]) = UUIDStringToNum(col) + def uUIDNumToString(col: StringColMagnet[_]) = UUIDNumToString(col) def bitmaskToList(col: NumericCol[_]) = BitmaskToList(col) def bitmaskToArray(col: NumericCol[_]) = BitmaskToArray(col) } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/HashFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/HashFunctions.scala index 8eab7cdc..3aade2b9 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/HashFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/HashFunctions.scala @@ -5,27 +5,27 @@ import com.crobox.clickhouse.dsl.{Column, ExpressionColumn} trait HashFunctions { self: Magnets => abstract class HashFunction(col: Column) extends ExpressionColumn[String](col) - case class HalfMD5(col: StringColMagnet[_]) extends HashFunction(col.column) - case class MD5(col: StringColMagnet[_]) extends HashFunction(col.column) - case class SipHash64(col: StringColMagnet[_]) extends HashFunction(col.column) - case class SipHash128(col: StringColMagnet[_]) extends HashFunction(col.column) + case class HalfMD5(col: StringColMagnet[_]) extends HashFunction(col.column) + case class MD5(col: StringColMagnet[_]) extends HashFunction(col.column) + case class SipHash64(col: StringColMagnet[_]) extends HashFunction(col.column) + case class SipHash128(col: StringColMagnet[_]) extends HashFunction(col.column) case class CityHash64(col1: ConstOrColMagnet[_], coln: ConstOrColMagnet[_]*) 
extends HashFunction(col1.column) - case class IntHash32(col: NumericCol[_]) extends HashFunction(col.column) - case class IntHash64(col: NumericCol[_]) extends HashFunction(col.column) - case class SHA1(col: ConstOrColMagnet[_]) extends HashFunction(col.column) - case class SHA224(col: ConstOrColMagnet[_]) extends HashFunction(col.column) - case class SHA256(col: ConstOrColMagnet[_]) extends HashFunction(col.column) + case class IntHash32(col: NumericCol[_]) extends HashFunction(col.column) + case class IntHash64(col: NumericCol[_]) extends HashFunction(col.column) + case class SHA1(col: ConstOrColMagnet[_]) extends HashFunction(col.column) + case class SHA224(col: ConstOrColMagnet[_]) extends HashFunction(col.column) + case class SHA256(col: ConstOrColMagnet[_]) extends HashFunction(col.column) case class URLHash(col: ConstOrColMagnet[_], depth: NumericCol[_]) extends HashFunction(col.column) - def halfMD5(col: StringColMagnet[_]) = HalfMD5(col) - def mD5(col: StringColMagnet[_]) = MD5(col) - def sipHash64(col: StringColMagnet[_]) = SipHash64(col) - def sipHash128(col: StringColMagnet[_]) = SipHash128(col) + def halfMD5(col: StringColMagnet[_]) = HalfMD5(col) + def mD5(col: StringColMagnet[_]) = MD5(col) + def sipHash64(col: StringColMagnet[_]) = SipHash64(col) + def sipHash128(col: StringColMagnet[_]) = SipHash128(col) def cityHash64(col1: ConstOrColMagnet[_], coln: ConstOrColMagnet[_]*) = CityHash64(col1, coln: _*) - def intHash32(col: NumericCol[_]) = IntHash32(col) - def intHash64(col: NumericCol[_]) = IntHash64(col) - def sHA1(col: ConstOrColMagnet[_]) = SHA1(col) - def sHA224(col: ConstOrColMagnet[_]) = SHA224(col) - def sHA256(col: ConstOrColMagnet[_]) = SHA256(col) + def intHash32(col: NumericCol[_]) = IntHash32(col) + def intHash64(col: NumericCol[_]) = IntHash64(col) + def sHA1(col: ConstOrColMagnet[_]) = SHA1(col) + def sHA224(col: ConstOrColMagnet[_]) = SHA224(col) + def sHA256(col: ConstOrColMagnet[_]) = SHA256(col) def uRLHash(col: ConstOrColMagnet[_], depth: NumericCol[_]) = URLHash(col, depth) } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/HigherOrderFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/HigherOrderFunctions.scala index 866e4539..8bac8549 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/HigherOrderFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/HigherOrderFunctions.scala @@ -11,52 +11,60 @@ trait HigherOrderFunctions { self: Magnets => ) extends ExpressionColumn[R](EmptyColumn) // double type casts - case class ArrayAll[I, O](_func1: Option[TableColumn[I] => ExpressionColumn[O]], - _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _arrays: ArrayColMagnet[_ <: Iterable[I]]*) - extends HigherOrderFunction[I, O, Boolean](_func1, _func2, _func3, _arrays: _*) - case class ArrayAvg[I, O](_func1: Option[TableColumn[I] => ExpressionColumn[O]], - _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _arrays: ArrayColMagnet[_ <: Iterable[I]]*) - extends HigherOrderFunction[I, O, Double](_func1, _func2, _func3, _arrays: _*) - case class ArrayCumSum[I, O](_func1: Option[TableColumn[I] => ExpressionColumn[O]], - _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => 
ExpressionColumn[O]], - _arrays: ArrayColMagnet[_ <: Iterable[I]]*) - extends HigherOrderFunction[I, O, Iterable[O]](_func1, _func2, _func3, _arrays: _*) - case class ArrayMap[I, O](_func1: Option[TableColumn[I] => ExpressionColumn[O]], - _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _arrays: ArrayColMagnet[_ <: Iterable[I]]*) - extends HigherOrderFunction[I, O, Iterable[O]](_func1, _func2, _func3, _arrays: _*) - case class ArrayMax[I, O](_func1: Option[TableColumn[I] => ExpressionColumn[O]], - _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _arrays: ArrayColMagnet[_ <: Iterable[I]]*) - extends HigherOrderFunction[I, O, O](_func1, _func2, _func3, _arrays: _*) - case class ArrayMin[I, O](_func1: Option[TableColumn[I] => ExpressionColumn[O]], - _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _arrays: ArrayColMagnet[_ <: Iterable[I]]*) - extends HigherOrderFunction[I, O, O](_func1, _func2, _func3, _arrays: _*) + case class ArrayAll[I, O]( + _func1: Option[TableColumn[I] => ExpressionColumn[O]], + _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _arrays: ArrayColMagnet[_ <: Iterable[I]]* + ) extends HigherOrderFunction[I, O, Boolean](_func1, _func2, _func3, _arrays: _*) + case class ArrayAvg[I, O]( + _func1: Option[TableColumn[I] => ExpressionColumn[O]], + _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _arrays: ArrayColMagnet[_ <: Iterable[I]]* + ) extends HigherOrderFunction[I, O, Double](_func1, _func2, _func3, _arrays: _*) + case class ArrayCumSum[I, O]( + _func1: Option[TableColumn[I] => ExpressionColumn[O]], + _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _arrays: ArrayColMagnet[_ <: Iterable[I]]* + ) extends HigherOrderFunction[I, O, Iterable[O]](_func1, _func2, _func3, _arrays: _*) + case class ArrayMap[I, O]( + _func1: Option[TableColumn[I] => ExpressionColumn[O]], + _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _arrays: ArrayColMagnet[_ <: Iterable[I]]* + ) extends HigherOrderFunction[I, O, Iterable[O]](_func1, _func2, _func3, _arrays: _*) + case class ArrayMax[I, O]( + _func1: Option[TableColumn[I] => ExpressionColumn[O]], + _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _arrays: ArrayColMagnet[_ <: Iterable[I]]* + ) extends HigherOrderFunction[I, O, O](_func1, _func2, _func3, _arrays: _*) + case class ArrayMin[I, O]( + _func1: Option[TableColumn[I] => ExpressionColumn[O]], + _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _arrays: ArrayColMagnet[_ <: Iterable[I]]* + ) extends HigherOrderFunction[I, O, O](_func1, _func2, _func3, 
_arrays: _*) case class ArrayReverseSort[I, O]( _func1: Option[TableColumn[I] => ExpressionColumn[O]], _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], _arrays: ArrayColMagnet[_ <: Iterable[I]]* ) extends HigherOrderFunction[I, O, Iterable[O]](_func1, _func2, _func3, _arrays: _*) - case class ArraySort[I, O](_func1: Option[TableColumn[I] => ExpressionColumn[O]], - _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _arrays: ArrayColMagnet[_ <: Iterable[I]]*) - extends HigherOrderFunction[I, O, Iterable[O]](_func1, _func2, _func3, _arrays: _*) - case class ArraySum[I, O](_func1: Option[TableColumn[I] => ExpressionColumn[O]], - _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], - _arrays: ArrayColMagnet[_ <: Iterable[I]]*) - extends HigherOrderFunction[I, O, O](_func1, _func2, _func3, _arrays: _*) + case class ArraySort[I, O]( + _func1: Option[TableColumn[I] => ExpressionColumn[O]], + _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _arrays: ArrayColMagnet[_ <: Iterable[I]]* + ) extends HigherOrderFunction[I, O, Iterable[O]](_func1, _func2, _func3, _arrays: _*) + case class ArraySum[I, O]( + _func1: Option[TableColumn[I] => ExpressionColumn[O]], + _func2: Option[(TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _func3: Option[(TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O]], + _arrays: ArrayColMagnet[_ <: Iterable[I]]* + ) extends HigherOrderFunction[I, O, O](_func1, _func2, _func3, _arrays: _*) // single type casts case class ArrayCount[I]( @@ -110,255 +118,355 @@ trait HigherOrderFunctions { self: Magnets => _arrays: ArrayColMagnet[_ <: Iterable[I]]* ) extends HigherOrderFunction[I, Boolean, Iterable[Iterable[I]]](None, Option(_func2), None, _arrays: _*) - def arrayAll[I](func: TableColumn[I] => ExpressionColumn[Boolean], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Boolean] = + def arrayAll[I]( + func: TableColumn[I] => ExpressionColumn[Boolean], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Boolean] = ArrayAll(Option(func), None, None, array) - def arrayAll2[I](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Boolean] = + def arrayAll2[I]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Boolean] = ArrayAll(None, Option(func), None, array1, array2) - def arrayAll3[I](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Boolean] = + def arrayAll3[I]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Boolean] = ArrayAll(None, None, Option(func), array1, 
array2, array3) - def arrayAvg[I, O](func: Option[TableColumn[I] => ExpressionColumn[O]], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Double] = + def arrayAvg[I, O]( + func: Option[TableColumn[I] => ExpressionColumn[O]], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Double] = ArrayAvg(func, None, None, array) - def arrayAvg2[I, O](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Double] = + def arrayAvg2[I, O]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Double] = ArrayAvg(None, Option(func), None, array1, array2) - def arrayAvg3[I, O](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Double] = + def arrayAvg3[I, O]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Double] = ArrayAvg(None, None, Option(func), array1, array2, array3) - def arrayCount[I](func: Option[TableColumn[I] => ExpressionColumn[Boolean]], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Int] = + def arrayCount[I]( + func: Option[TableColumn[I] => ExpressionColumn[Boolean]], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Int] = ArrayCount(func, None, None, array) - def arrayCount2[I](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Int] = + def arrayCount2[I]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Int] = ArrayCount(None, Option(func), None, array1, array2) - def arrayCount3[I](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Int] = + def arrayCount3[I]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Int] = ArrayCount(None, None, Option(func), array1, array2, array3) - def arrayCumSum[I, O](func: Option[TableColumn[I] => ExpressionColumn[O]], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arrayCumSum[I, O]( + func: Option[TableColumn[I] => ExpressionColumn[O]], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[O]] = ArrayCumSum(func, None, None, array) - def arrayCumSum2[I, O](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arrayCumSum2[I, O]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): 
ExpressionColumn[Iterable[O]] = ArrayCumSum(None, Option(func), None, array1, array2) - def arrayCumSum3[I, O](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arrayCumSum3[I, O]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[O]] = ArrayCumSum(None, None, Option(func), array1, array2, array3) - def arrayExists[I](func: TableColumn[I] => ExpressionColumn[Boolean], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Boolean] = + def arrayExists[I]( + func: TableColumn[I] => ExpressionColumn[Boolean], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Boolean] = ArrayExists(Option(func), None, None, array) - def arrayExists2[I](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Boolean] = + def arrayExists2[I]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Boolean] = ArrayExists(None, Option(func), None, array1, array2) - def arrayExists3[I](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Boolean] = + def arrayExists3[I]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Boolean] = ArrayExists(None, None, Option(func), array1, array2, array3) - def arrayFill[I](func: TableColumn[I] => ExpressionColumn[Boolean], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[I]] = + def arrayFill[I]( + func: TableColumn[I] => ExpressionColumn[Boolean], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[I]] = ArrayFill(Option(func), None, None, array) // @todo This doesn't make sense - def arrayFill2[I](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[I]] = + def arrayFill2[I]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[I]] = ArrayFill(None, Option(func), None, array1, array2) // @todo This doesn't make sense - def arrayFill3[I](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[I]] = + def arrayFill3[I]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[I]] = 
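// Arity convention, as a hedged sketch (hypothetical columns; assumes the DSL's implicit
// lifting into ArrayColMagnet and its comparison operators are in scope): the unary
// variants map one array through a one-argument lambda, while the *2/*3 variants pass
// two or three equally sized arrays to a two/three-argument lambda, mirroring ClickHouse's
// arrayFilter(x -> x > 5, nums) versus arrayFilter((x, y) -> x > y, nums, bounds).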
ArrayFill(None, None, Option(func), array1, array2, array3) - def arrayFilter[I](func: TableColumn[I] => ExpressionColumn[Boolean], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[I]] = + def arrayFilter[I]( + func: TableColumn[I] => ExpressionColumn[Boolean], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[I]] = ArrayFilter(Option(func), None, None, array) - def arrayFilter2[I](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[I]] = + def arrayFilter2[I]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[I]] = ArrayFilter(None, Option(func), None, array1, array2) - def arrayFilter3[I](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[I]] = + def arrayFilter3[I]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[I]] = ArrayFilter(None, None, Option(func), array1, array2, array3) - def arrayFirst[I](func: TableColumn[I] => ExpressionColumn[Boolean], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[I] = + def arrayFirst[I]( + func: TableColumn[I] => ExpressionColumn[Boolean], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[I] = ArrayFirst(Option(func), None, None, array) - def arrayFirst2[I](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[I] = + def arrayFirst2[I]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[I] = ArrayFirst(None, Option(func), None, array1, array2) - def arrayFirst3[I](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[I] = + def arrayFirst3[I]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[I] = ArrayFirst(None, None, Option(func), array1, array2, array3) - def arrayFirstIndex[I](func: TableColumn[I] => ExpressionColumn[Boolean], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Int] = + def arrayFirstIndex[I]( + func: TableColumn[I] => ExpressionColumn[Boolean], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Int] = ArrayFirstIndex(Option(func), None, None, array) - def arrayFirstIndex2[I](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Int] = + def arrayFirstIndex2[I]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ 
<: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Int] = ArrayFirstIndex(None, Option(func), None, array1, array2) - def arrayFirstIndex3[I](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Int] = + def arrayFirstIndex3[I]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Int] = ArrayFirstIndex(None, None, Option(func), array1, array2, array3) - def arrayMap[I, O](func: TableColumn[I] => ExpressionColumn[O], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arrayMap[I, O]( + func: TableColumn[I] => ExpressionColumn[O], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[O]] = ArrayMap(Option(func), None, None, array) - def arrayMap2[I, O](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arrayMap2[I, O]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[O]] = ArrayMap(None, Option(func), None, array1, array2) - def arrayMap3[I, O](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arrayMap3[I, O]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[O]] = ArrayMap(None, None, Option(func), array1, array2, array3) - def arrayMax[I, O](func: Option[TableColumn[I] => ExpressionColumn[O]], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[O] = + def arrayMax[I, O]( + func: Option[TableColumn[I] => ExpressionColumn[O]], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[O] = ArrayMax(func, None, None, array) - def arrayMax2[I, O](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[O] = + def arrayMax2[I, O]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[O] = ArrayMax(None, Option(func), None, array1, array2) - def arrayMax3[I, O](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[O] = + def arrayMax3[I, O]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[O] = ArrayMax(None, None, Option(func), array1, array2, array3) - def arrayMin[I, O](func: Option[TableColumn[I] => 
ExpressionColumn[O]], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[O] = + def arrayMin[I, O]( + func: Option[TableColumn[I] => ExpressionColumn[O]], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[O] = ArrayMin(func, None, None, array) - def arrayMin2[I, O](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[O] = + def arrayMin2[I, O]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[O] = ArrayMin(None, Option(func), None, array1, array2) - def arrayMin3[I, O](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[O] = + def arrayMin3[I, O]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[O] = ArrayMin(None, None, Option(func), array1, array2, array3) - def arrayReverseFill[I](func: TableColumn[I] => ExpressionColumn[Boolean], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[I]] = + def arrayReverseFill[I]( + func: TableColumn[I] => ExpressionColumn[Boolean], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[I]] = ArrayReverseFill(Option(func), None, None, array) - def arrayReverseFill2[I](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[I]] = + def arrayReverseFill2[I]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[I]] = ArrayReverseFill(None, Option(func), None, array1, array2) - def arrayReverseFill3[I](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[I]] = + def arrayReverseFill3[I]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[I]] = ArrayReverseFill(None, None, Option(func), array1, array2, array3) - def arrayReverseSort[I, O](func: Option[TableColumn[I] => ExpressionColumn[O]], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arrayReverseSort[I, O]( + func: Option[TableColumn[I] => ExpressionColumn[O]], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[O]] = ArrayReverseSort(func, None, None, array) - def arrayReverseSort2[I, O](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arrayReverseSort2[I, O]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): 
ExpressionColumn[Iterable[O]] = ArrayReverseSort(None, Option(func), None, array1, array2) - def arrayReverseSort3[I, O](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arrayReverseSort3[I, O]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[O]] = ArrayReverseSort(None, None, Option(func), array1, array2, array3) - def arrayReverseSplit[I](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[Iterable[I]]] = + def arrayReverseSplit[I]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[Iterable[I]]] = ArrayReverseSplit(func, array1, array2) - def arraySort[I, O](func: Option[TableColumn[I] => ExpressionColumn[O]], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arraySort[I, O]( + func: Option[TableColumn[I] => ExpressionColumn[O]], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[O]] = ArraySort(func, None, None, array) - def arraySort2[I, O](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arraySort2[I, O]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[O]] = ArraySort(None, Option(func), None, array1, array2) - def arraySort3[I, O](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[O]] = + def arraySort3[I, O]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[O]] = ArraySort(None, None, Option(func), array1, array2, array3) - def arraySplit[I](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[Iterable[Iterable[I]]] = + def arraySplit[I]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[Boolean], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[Iterable[Iterable[I]]] = ArraySplit(func, array1, array2) - def arraySum[I, O](func: Option[TableColumn[I] => ExpressionColumn[O]], - array: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[O] = + def arraySum[I, O]( + func: Option[TableColumn[I] => ExpressionColumn[O]], + array: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[O] = ArraySum(func, None, None, array) - def arraySum2[I, O](func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], 
- array2: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[O] = + def arraySum2[I, O]( + func: (TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[O] = ArraySum(None, Option(func), None, array1, array2) - def arraySum3[I, O](func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], - array1: ArrayColMagnet[_ <: Iterable[I]], - array2: ArrayColMagnet[_ <: Iterable[I]], - array3: ArrayColMagnet[_ <: Iterable[I]]): ExpressionColumn[O] = + def arraySum3[I, O]( + func: (TableColumn[I], TableColumn[I], TableColumn[I]) => ExpressionColumn[O], + array1: ArrayColMagnet[_ <: Iterable[I]], + array2: ArrayColMagnet[_ <: Iterable[I]], + array3: ArrayColMagnet[_ <: Iterable[I]] + ): ExpressionColumn[O] = ArraySum(None, None, Option(func), array1, array2, array3) } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/IPFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/IPFunctions.scala index cb2be5e2..12569764 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/IPFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/IPFunctions.scala @@ -22,5 +22,5 @@ IPv4StringToNum(s) IPv4NumToStringClassC(num) IPv6NumToString(x) IPv6StringToNum(s) - */ + */ } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/InFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/InFunctions.scala index b22dba9d..a48f84c8 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/InFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/InFunctions.scala @@ -15,9 +15,9 @@ trait InFunctions { self: Magnets => case class GlobalIn(_l: ConstOrColMagnet[_], _r: InFuncRHMagnet) extends InFunctionCol(_l, _r) case class GlobalNotIn(_l: ConstOrColMagnet[_], _r: InFuncRHMagnet) extends InFunctionCol(_l, _r) - //FIXME: we lose types here, + // FIXME: we lose types here, // is there anything that could properly represent the inner types of these column functions? 
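// Concretely: Tuple(Seq(col1, col2)) below is typed ExpressionColumn[Nothing], so the
// element types of col1 and col2 are already erased by the time the tuple is consumed.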
- //This is especially problematic when using TupleElement + // This is especially problematic when using TupleElement case class Tuple(coln: Seq[ConstOrColMagnet[_]]) extends ExpressionColumn[Nothing](EmptyColumn) with InFunction case class TupleElement[T](tuple: Tuple, index: NumericCol[_]) diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/JsonFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/JsonFunctions.scala index 3b04c7cf..8d8d0426 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/JsonFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/JsonFunctions.scala @@ -3,23 +3,37 @@ package com.crobox.clickhouse.dsl.column import com.crobox.clickhouse.dsl.ExpressionColumn trait JsonFunctions { self: Magnets => - abstract class JsonFunction[T](val params: StringColMagnet[_], val fieldName: StringColMagnet[_]) extends ExpressionColumn[T](params.column) + abstract class JsonFunction[T](val params: StringColMagnet[_], val fieldName: StringColMagnet[_]) + extends ExpressionColumn[T](params.column) - case class VisitParamHas(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) extends JsonFunction[Boolean](_params, _fieldName) - case class VisitParamExtractUInt(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) extends JsonFunction[Long](_params, _fieldName) - case class VisitParamExtractInt(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) extends JsonFunction[Long](_params, _fieldName) - case class VisitParamExtractFloat(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) extends JsonFunction[Float](_params, _fieldName) - case class VisitParamExtractBool(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) extends JsonFunction[Boolean](_params, _fieldName) - case class VisitParamExtractRaw[T](_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) extends JsonFunction[T](_params, _fieldName) - case class VisitParamExtractString(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) extends JsonFunction[String](_params, _fieldName) + case class VisitParamHas(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) + extends JsonFunction[Boolean](_params, _fieldName) + case class VisitParamExtractUInt(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) + extends JsonFunction[Long](_params, _fieldName) + case class VisitParamExtractInt(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) + extends JsonFunction[Long](_params, _fieldName) + case class VisitParamExtractFloat(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) + extends JsonFunction[Float](_params, _fieldName) + case class VisitParamExtractBool(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) + extends JsonFunction[Boolean](_params, _fieldName) + case class VisitParamExtractRaw[T](_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) + extends JsonFunction[T](_params, _fieldName) + case class VisitParamExtractString(_params: StringColMagnet[_], _fieldName: StringColMagnet[_]) + extends JsonFunction[String](_params, _fieldName) - def visitParamHas(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = VisitParamHas(params, fieldName) - def visitParamExtractUInt(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = VisitParamExtractUInt(params, fieldName) - def visitParamExtractInt(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = VisitParamExtractInt(params, fieldName) - def visitParamExtractFloat(params: StringColMagnet[_], fieldName: 
StringColMagnet[_]) = VisitParamExtractFloat(params, fieldName) - def visitParamExtractBool(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = VisitParamExtractBool(params, fieldName) - def visitParamExtractRaw(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = VisitParamExtractRaw(params, fieldName) - def visitParamExtractString(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = VisitParamExtractString(params, fieldName) + def visitParamHas(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = VisitParamHas(params, fieldName) + def visitParamExtractUInt(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = + VisitParamExtractUInt(params, fieldName) + def visitParamExtractInt(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = + VisitParamExtractInt(params, fieldName) + def visitParamExtractFloat(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = + VisitParamExtractFloat(params, fieldName) + def visitParamExtractBool(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = + VisitParamExtractBool(params, fieldName) + def visitParamExtractRaw(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = + VisitParamExtractRaw(params, fieldName) + def visitParamExtractString(params: StringColMagnet[_], fieldName: StringColMagnet[_]) = + VisitParamExtractString(params, fieldName) /* visitParamHas(params, name) @@ -29,5 +43,5 @@ visitParamExtractFloat(params, name) visitParamExtractBool(params, name) visitParamExtractRaw(params, name) visitParamExtractString(params, name) - */ + */ } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/LogicalFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/LogicalFunctions.scala index 4472303e..d24238db 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/LogicalFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/LogicalFunctions.scala @@ -28,7 +28,7 @@ trait LogicalFunctions { this: Magnets => } } - //Reference with another name to allow to use it in the trait + // Reference with another name to allow to use it in the trait private def _and = and _ private def _or = or _ private def _xor = xor _ @@ -65,6 +65,6 @@ trait LogicalFunctions { this: Magnets => col.asOption match { case Some(Const(true)) => Const(false) case Some(Const(false)) => Const(true) - case _ => LogicalFunction(col, Not, col) //Needs both right and left for the tokenizer + case _ => LogicalFunction(col, Not, col) // Needs both right and left for the tokenizer } } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/Magnets.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/Magnets.scala index 67e68ccd..5d1c081d 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/Magnets.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/Magnets.scala @@ -25,8 +25,8 @@ trait Magnets { /** * Magnet pattern * - * The pattern provides implicit conversion to wrapper classes, - * this allows the DSL to accept multiple compatible column types in a single function. + * The pattern provides implicit conversion to wrapper classes, this allows the DSL to accept multiple compatible + * column types in a single function. */ trait Magnet[+C] { val column: TableColumn[C] @@ -63,8 +63,7 @@ trait Magnets { } /** - * Any constant or column. - * Sidenote: The current implementation doesn't represent collections. + * Any constant or column. Sidenote: The current implementation doesn't represent collections. 
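+ * A minimal sketch of the lifting (hypothetical values; the implicit conversions that
+ * accompany these magnets do the actual wrapping):
+ * {{{
+ * val fromConst: ConstOrColMagnet[String] = "some constant" // literal lifted into a magnet
+ * val fromColumn: ConstOrColMagnet[String] = someStringColumn // a TableColumn[String], lifted likewise
+ * }}}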
*/ trait ConstOrColMagnet[+C] extends Magnet[C] with ScalaBooleanFunctionOps with InOps with NullableOps diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/MathematicalFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/MathematicalFunctions.scala index 58a1e14d..d56a5e54 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/MathematicalFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/MathematicalFunctions.scala @@ -6,51 +6,51 @@ trait MathematicalFunctions { self: Magnets => sealed abstract class MathFuncColumn(col: Column) extends ExpressionColumn[Double](col) - abstract class MathConst extends MathFuncColumn(EmptyColumn) - abstract class MathTransformation(val numericCol: NumericCol[_]) extends MathFuncColumn(numericCol.column) + abstract class MathConst extends MathFuncColumn(EmptyColumn) + abstract class MathTransformation(val numericCol: NumericCol[_]) extends MathFuncColumn(numericCol.column) case class E() extends MathConst() case class Pi() extends MathConst() - case class Exp(col: NumericCol[_]) extends MathTransformation(col) - case class Log(col: NumericCol[_]) extends MathTransformation(col) - case class Exp2(col: NumericCol[_]) extends MathTransformation(col) - case class Log2(col: NumericCol[_]) extends MathTransformation(col) - case class Exp10(col: NumericCol[_]) extends MathTransformation(col) - case class Log10(col: NumericCol[_]) extends MathTransformation(col) - case class Sqrt(col: NumericCol[_]) extends MathTransformation(col) - case class Cbrt(col: NumericCol[_]) extends MathTransformation(col) - case class Erf(col: NumericCol[_]) extends MathTransformation(col) - case class Erfc(col: NumericCol[_]) extends MathTransformation(col) - case class Lgamma(col: NumericCol[_]) extends MathTransformation(col) - case class Tgamma(col: NumericCol[_]) extends MathTransformation(col) - case class Sin(col: NumericCol[_]) extends MathTransformation(col) - case class Cos(col: NumericCol[_]) extends MathTransformation(col) - case class Tan(col: NumericCol[_]) extends MathTransformation(col) - case class Asin(col: NumericCol[_]) extends MathTransformation(col) - case class Acos(col: NumericCol[_]) extends MathTransformation(col) - case class Atan(col: NumericCol[_]) extends MathTransformation(col) + case class Exp(col: NumericCol[_]) extends MathTransformation(col) + case class Log(col: NumericCol[_]) extends MathTransformation(col) + case class Exp2(col: NumericCol[_]) extends MathTransformation(col) + case class Log2(col: NumericCol[_]) extends MathTransformation(col) + case class Exp10(col: NumericCol[_]) extends MathTransformation(col) + case class Log10(col: NumericCol[_]) extends MathTransformation(col) + case class Sqrt(col: NumericCol[_]) extends MathTransformation(col) + case class Cbrt(col: NumericCol[_]) extends MathTransformation(col) + case class Erf(col: NumericCol[_]) extends MathTransformation(col) + case class Erfc(col: NumericCol[_]) extends MathTransformation(col) + case class Lgamma(col: NumericCol[_]) extends MathTransformation(col) + case class Tgamma(col: NumericCol[_]) extends MathTransformation(col) + case class Sin(col: NumericCol[_]) extends MathTransformation(col) + case class Cos(col: NumericCol[_]) extends MathTransformation(col) + case class Tan(col: NumericCol[_]) extends MathTransformation(col) + case class Asin(col: NumericCol[_]) extends MathTransformation(col) + case class Acos(col: NumericCol[_]) extends MathTransformation(col) + case class Atan(col: NumericCol[_]) extends 
MathTransformation(col) case class Pow(x: NumericCol[_], y: NumericCol[_]) extends MathTransformation(x) - def e() = E() - def pi() = Pi() - def exp(col: NumericCol[_]) = Exp(col) - def log(col: NumericCol[_]) = Log(col) - def exp2(col: NumericCol[_]) = Exp2(col) - def log2(col: NumericCol[_]) = Log2(col) - def exp10(col: NumericCol[_]) = Exp10(col) - def log10(col: NumericCol[_]) = Log10(col) - def sqrt(col: NumericCol[_]) = Sqrt(col) - def cbrt(col: NumericCol[_]) = Cbrt(col) - def erf(col: NumericCol[_]) = Erf(col) - def erfc(col: NumericCol[_]) = Erfc(col) - def lgamma(col: NumericCol[_]) = Lgamma(col) - def tgamma(col: NumericCol[_]) = Tgamma(col) - def sin(col: NumericCol[_]) = Sin(col) - def cos(col: NumericCol[_]) = Cos(col) - def tan(col: NumericCol[_]) = Tan(col) - def asin(col: NumericCol[_]) = Asin(col) - def acos(col: NumericCol[_]) = Acos(col) - def atan(col: NumericCol[_]) = Atan(col) + def e() = E() + def pi() = Pi() + def exp(col: NumericCol[_]) = Exp(col) + def log(col: NumericCol[_]) = Log(col) + def exp2(col: NumericCol[_]) = Exp2(col) + def log2(col: NumericCol[_]) = Log2(col) + def exp10(col: NumericCol[_]) = Exp10(col) + def log10(col: NumericCol[_]) = Log10(col) + def sqrt(col: NumericCol[_]) = Sqrt(col) + def cbrt(col: NumericCol[_]) = Cbrt(col) + def erf(col: NumericCol[_]) = Erf(col) + def erfc(col: NumericCol[_]) = Erfc(col) + def lgamma(col: NumericCol[_]) = Lgamma(col) + def tgamma(col: NumericCol[_]) = Tgamma(col) + def sin(col: NumericCol[_]) = Sin(col) + def cos(col: NumericCol[_]) = Cos(col) + def tan(col: NumericCol[_]) = Tan(col) + def asin(col: NumericCol[_]) = Asin(col) + def acos(col: NumericCol[_]) = Acos(col) + def atan(col: NumericCol[_]) = Atan(col) def pow(x: NumericCol[_], y: NumericCol[_]) = Pow(x, y) } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/MiscellaneousFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/MiscellaneousFunctions.scala index a9315921..0e7caf52 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/MiscellaneousFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/MiscellaneousFunctions.scala @@ -16,69 +16,80 @@ trait MiscellaneousFunctions { self: Magnets => case class BlockSize() extends MiscellaneousConst[Long]() case class Materialize(col: ConstOrColMagnet[_]) extends MiscellaneousOp[Long](col) case class Ignore(coln: ConstOrColMagnet[_]*) extends MiscellaneousConst[Long]() - case class Sleep(col: NumericCol[_]) extends MiscellaneousOp[Long](col.column) //is this an operator? + case class Sleep(col: NumericCol[_]) extends MiscellaneousOp[Long](col.column) // is this an operator? 
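// Hedged note: in ClickHouse itself, sleep(seconds) is an ordinary function that delays
// each processed data block by the given number of seconds; it is mainly useful in tests.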
case class CurrentDatabase() extends MiscellaneousConst[String]() case class IsFinite(col: NumericCol[_]) extends MiscellaneousOp[Boolean](col.column) case class IsInfinite(col: NumericCol[_]) extends MiscellaneousOp[Boolean](col.column) case class IsNaN(col: NumericCol[_]) extends MiscellaneousOp[Boolean](col.column) - case class HasColumnInTable(database: StringColMagnet[_], - table: StringColMagnet[_], - column: StringColMagnet[_], - hostName: Option[StringColMagnet[_]] = None, - userName: Option[StringColMagnet[_]] = None, - passWord: Option[StringColMagnet[_]] = None) - extends MiscellaneousConst[Boolean]() - case class Bar(col: NumericCol[_], from: NumericCol[_], to: NumericCol[_], default: Option[NumericCol[_]]) extends MiscellaneousOp[String](col.column) - case class Transform[L,R](col: ConstOrColMagnet[L], - arrayFrom: ArrayColMagnet[Iterable[L]], - arrayTo: ArrayColMagnet[Iterable[R]], - default: ConstOrColMagnet[R]) - extends MiscellaneousOp[Long](col) - case class FormatReadableSize(col: NumericCol[_]) extends MiscellaneousOp[String](col.column) + case class HasColumnInTable( + database: StringColMagnet[_], + table: StringColMagnet[_], + column: StringColMagnet[_], + hostName: Option[StringColMagnet[_]] = None, + userName: Option[StringColMagnet[_]] = None, + passWord: Option[StringColMagnet[_]] = None + ) extends MiscellaneousConst[Boolean]() + case class Bar(col: NumericCol[_], from: NumericCol[_], to: NumericCol[_], default: Option[NumericCol[_]]) + extends MiscellaneousOp[String](col.column) + case class Transform[L, R]( + col: ConstOrColMagnet[L], + arrayFrom: ArrayColMagnet[Iterable[L]], + arrayTo: ArrayColMagnet[Iterable[R]], + default: ConstOrColMagnet[R] + ) extends MiscellaneousOp[Long](col) + case class FormatReadableSize(col: NumericCol[_]) extends MiscellaneousOp[String](col.column) case class Least(a: ConstOrColMagnet[_], b: ConstOrColMagnet[_]) extends MiscellaneousOp[Long](a) case class Greatest(a: ConstOrColMagnet[_], b: ConstOrColMagnet[_]) extends MiscellaneousOp[Long](a) - case class Uptime() extends MiscellaneousConst[Long]() - case class Version() extends MiscellaneousConst[Long]() - case class RowNumberInAllBlocks() extends MiscellaneousConst[Long]() - case class RunningDifference(col: ConstOrColMagnet[_]) extends MiscellaneousOp[Long](col) - case class MACNumToString(col: NumericCol[_]) extends MiscellaneousOp[String](col.column) - case class MACStringToNum(col: StringColMagnet[_]) extends MiscellaneousOp[Long](col.column) - case class MACStringToOUI(col: StringColMagnet[_]) extends MiscellaneousOp[Long](col.column) + case class Uptime() extends MiscellaneousConst[Long]() + case class Version() extends MiscellaneousConst[Long]() + case class RowNumberInAllBlocks() extends MiscellaneousConst[Long]() + case class RunningDifference(col: ConstOrColMagnet[_]) extends MiscellaneousOp[Long](col) + case class MACNumToString(col: NumericCol[_]) extends MiscellaneousOp[String](col.column) + case class MACStringToNum(col: StringColMagnet[_]) extends MiscellaneousOp[Long](col.column) + case class MACStringToOUI(col: StringColMagnet[_]) extends MiscellaneousOp[Long](col.column) - def hostName() = HostName() + def hostName() = HostName() def visibleWidth(col: ConstOrColMagnet[_]) = VisibleWidth(col) def toTypeName(col: ConstOrColMagnet[_]) = ToTypeName(col) - def blockSize() = BlockSize() + def blockSize() = BlockSize() def materialize(col: ConstOrColMagnet[_]) = Materialize(col) def ignore(coln: ConstOrColMagnet[_]*) = Ignore(coln: _*) def sleep(col: NumericCol[_]) = 
Sleep(col: NumericCol[_]) - def currentDatabase() = CurrentDatabase() - def isFinite[O](col: NumericCol[O]) = IsFinite(col) + def currentDatabase() = CurrentDatabase() + def isFinite[O](col: NumericCol[O]) = IsFinite(col) def isInfinite(col: NumericCol[_]) = IsInfinite(col) def isNaN(col: NumericCol[_]) = IsNaN(col: NumericCol[_]) - def hasColumnInTable(database: StringColMagnet[_], - table: StringColMagnet[_], - column: StringColMagnet[_], - hostName: Option[StringColMagnet[_]] = None, - userName: Option[StringColMagnet[_]] = None, - passWord: Option[StringColMagnet[_]] = None) = + def hasColumnInTable( + database: StringColMagnet[_], + table: StringColMagnet[_], + column: StringColMagnet[_], + hostName: Option[StringColMagnet[_]] = None, + userName: Option[StringColMagnet[_]] = None, + passWord: Option[StringColMagnet[_]] = None + ) = HasColumnInTable(database, table, column, hostName, userName, passWord) - def bar(col: NumericCol[_],from: NumericCol[_],to: NumericCol[_],default: Option[NumericCol[_]]) = Bar(col, from, to, default) + def bar(col: NumericCol[_], from: NumericCol[_], to: NumericCol[_], default: Option[NumericCol[_]]) = + Bar(col, from, to, default) - def transform[L,R](col: ConstOrColMagnet[L], arrayFrom: ArrayColMagnet[Iterable[L]], arrayTo: ArrayColMagnet[Iterable[R]], default: ConstOrColMagnet[R]) = - Transform[L,R](col, arrayFrom, arrayTo, default) - def formatReadableSize(col: NumericCol[_]) = FormatReadableSize(col) + def transform[L, R]( + col: ConstOrColMagnet[L], + arrayFrom: ArrayColMagnet[Iterable[L]], + arrayTo: ArrayColMagnet[Iterable[R]], + default: ConstOrColMagnet[R] + ) = + Transform[L, R](col, arrayFrom, arrayTo, default) + def formatReadableSize(col: NumericCol[_]) = FormatReadableSize(col) def least(a: ConstOrColMagnet[_], b: ConstOrColMagnet[_]) = Least(a: ConstOrColMagnet[_], b) def greatest(a: ConstOrColMagnet[_], b: ConstOrColMagnet[_]) = Greatest(a: ConstOrColMagnet[_], b) - def uptime() = Uptime() - def version() = Version() - def rowNumberInAllBlocks() = RowNumberInAllBlocks() - def runningDifference(col: ConstOrColMagnet[_]) = RunningDifference(col) - def mACNumToString(col: NumericCol[_]) = MACNumToString(col) - def mACStringToNum(col: StringColMagnet[_]) = MACStringToNum(col) - def mACStringToOUI(col: StringColMagnet[_]) = MACStringToOUI(col) + def uptime() = Uptime() + def version() = Version() + def rowNumberInAllBlocks() = RowNumberInAllBlocks() + def runningDifference(col: ConstOrColMagnet[_]) = RunningDifference(col) + def mACNumToString(col: NumericCol[_]) = MACNumToString(col) + def mACStringToNum(col: StringColMagnet[_]) = MACStringToNum(col) + def mACStringToOUI(col: StringColMagnet[_]) = MACStringToOUI(col) /* hostName() @@ -108,5 +119,5 @@ trait MiscellaneousFunctions { self: Magnets => arrayJoin tuple - */ + */ } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/RandomFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/RandomFunctions.scala index 940b9179..888c7c62 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/RandomFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/RandomFunctions.scala @@ -6,13 +6,13 @@ trait RandomFunctions { self: Magnets => abstract class RandomFunction() extends ExpressionColumn[Long](EmptyColumn) - case class Rand() extends RandomFunction + case class Rand() extends RandomFunction case class Rand64() extends RandomFunction - def rand() = Rand() + def rand() = Rand() def rand64() = Rand64() -/* + /* rand rand64 - */ + */ } diff --git 
a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/RoundingFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/RoundingFunctions.scala index f6beeab2..a002c71d 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/RoundingFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/RoundingFunctions.scala @@ -6,25 +6,25 @@ trait RoundingFunctions { self: Magnets => abstract class RoundingFunction(col: NumericCol[_]) extends ExpressionColumn[Long](col.column) case class Floor(col: NumericCol[_], n: NumericCol[_]) extends RoundingFunction(col) - case class Ceil(col: NumericCol[_], n: NumericCol[_]) extends RoundingFunction(col) + case class Ceil(col: NumericCol[_], n: NumericCol[_]) extends RoundingFunction(col) case class Round(col: NumericCol[_], n: NumericCol[_]) extends RoundingFunction(col) - case class RoundToExp2(col: NumericCol[_]) extends RoundingFunction(col) - case class RoundDuration(col: NumericCol[_]) extends RoundingFunction(col) - case class RoundAge(col: NumericCol[_]) extends RoundingFunction(col) + case class RoundToExp2(col: NumericCol[_]) extends RoundingFunction(col) + case class RoundDuration(col: NumericCol[_]) extends RoundingFunction(col) + case class RoundAge(col: NumericCol[_]) extends RoundingFunction(col) def floor(col: NumericCol[_], n: NumericCol[_]) = Floor(col, n) - def ceil(col: NumericCol[_], n: NumericCol[_]) = Ceil(col, n) + def ceil(col: NumericCol[_], n: NumericCol[_]) = Ceil(col, n) def round(col: NumericCol[_], n: NumericCol[_]) = Round(col, n) - def roundToExp2(col: NumericCol[_]) = RoundToExp2(col) - def roundDuration(col: NumericCol[_]) = RoundDuration(col) - def roundAge(col: NumericCol[_]) = RoundAge(col) + def roundToExp2(col: NumericCol[_]) = RoundToExp2(col) + def roundDuration(col: NumericCol[_]) = RoundDuration(col) + def roundAge(col: NumericCol[_]) = RoundAge(col) -/* + /* floor(x[, N]) ceil(x[, N]) round(x[, N]) roundToExp2(num) roundDuration(num) roundAge(num) - */ + */ } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ScalaStringFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ScalaStringFunctions.scala index 177369cb..a7683cb0 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ScalaStringFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/ScalaStringFunctions.scala @@ -8,16 +8,19 @@ trait ScalaStringFunctions { self: StringFunctions with StringSearchFunctions wi trait ScalaStringFunctionOps { self: StringSearchOps with StringOps with StringColMagnet[_] => - def startsWithAnyOf[S](others: Seq[S], - caseInsensitive: Boolean)(implicit ev: S => StringColMagnet[_]): TableColumn[Boolean] = + def startsWithAnyOf[S](others: Seq[S], caseInsensitive: Boolean)(implicit + ev: S => StringColMagnet[_] + ): TableColumn[Boolean] = if (caseInsensitive) iStartsWithAnyOf(others) else startsWithAnyOf(others) - def endsWithAnyOf[S](others: Seq[S], - caseInsensitive: Boolean)(implicit ev: S => StringColMagnet[_]): TableColumn[Boolean] = + def endsWithAnyOf[S](others: Seq[S], caseInsensitive: Boolean)(implicit + ev: S => StringColMagnet[_] + ): TableColumn[Boolean] = if (caseInsensitive) iEndsWithAnyOf(others) else endsWithAnyOf(others) - def containsAnyOf[S](others: Iterable[S], - caseInsensitive: Boolean)(implicit ev: S => StringColMagnet[_]): TableColumn[Boolean] = + def containsAnyOf[S](others: Iterable[S], caseInsensitive: Boolean)(implicit + ev: S => StringColMagnet[_] + ): TableColumn[Boolean] = if (caseInsensitive) 
iContainsAnyOf(others) else containsAnyOf(others) def startsWith(other: StringColMagnet[_], caseInsensitive: Boolean): TableColumn[Boolean] = diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/StringSearchFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/StringSearchFunctions.scala index e6e8fb2c..ebdd002e 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/StringSearchFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/StringSearchFunctions.scala @@ -5,10 +5,11 @@ import com.crobox.clickhouse.dsl.ExpressionColumn trait StringSearchFunctions { self: Magnets => abstract class StringSearchFunc[+V](val col1: StringColMagnet[_], val col2: StringColMagnet[_]) extends ExpressionColumn[V](col1.column) - abstract class StringSearchReplaceFunc(col1: StringColMagnet[_], - col2: StringColMagnet[_], - val replace: StringColMagnet[_]) - extends StringSearchFunc[String](col1, col2) + abstract class StringSearchReplaceFunc( + col1: StringColMagnet[_], + col2: StringColMagnet[_], + val replace: StringColMagnet[_] + ) extends StringSearchFunc[String](col1, col2) case class Position(col: StringColMagnet[_], needle: StringColMagnet[_], caseSensitive: Boolean = true) extends StringSearchFunc[Long](col, needle) @@ -65,13 +66,17 @@ trait StringSearchFunctions { self: Magnets => def replaceAll(col: StringColMagnet[_], pattern: StringColMagnet[_], replacement: StringColMagnet[_]): ReplaceAll = ReplaceAll(col, pattern, replacement) - def replaceRegexpOne(col: StringColMagnet[_], - pattern: StringColMagnet[_], - replacement: StringColMagnet[_]): ReplaceRegexpOne = ReplaceRegexpOne(col, pattern, replacement) - - def replaceRegexpAll(col: StringColMagnet[_], - pattern: StringColMagnet[_], - replacement: StringColMagnet[_]): ReplaceRegexpAll = ReplaceRegexpAll(col, pattern, replacement) + def replaceRegexpOne( + col: StringColMagnet[_], + pattern: StringColMagnet[_], + replacement: StringColMagnet[_] + ): ReplaceRegexpOne = ReplaceRegexpOne(col, pattern, replacement) + + def replaceRegexpAll( + col: StringColMagnet[_], + pattern: StringColMagnet[_], + replacement: StringColMagnet[_] + ): ReplaceRegexpAll = ReplaceRegexpAll(col, pattern, replacement) trait StringSearchOps { self: StringColMagnet[_] => diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/TypeCastFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/TypeCastFunctions.scala index 89f8f2d3..c22bb8e5 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/TypeCastFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/TypeCastFunctions.scala @@ -9,108 +9,121 @@ trait TypeCastFunctions { self: Magnets => abstract class TypeCastColumn[V](val targetColumn: ConstOrColMagnet[_]) - extends ExpressionColumn[V](targetColumn.column) + extends ExpressionColumn[V](targetColumn.column) case class Reinterpret[V](typeCastColumn: TypeCastColumn[_] with Reinterpretable) - extends TypeCastColumn[V](typeCastColumn.targetColumn) + extends TypeCastColumn[V](typeCastColumn.targetColumn) - //Tagging of compatible + // Tagging of compatible sealed trait Reinterpretable // @todo Unsigned types are basically not supported. 
For now keep the same as signed types - case class UInt8(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Byte] = None, - orNull: Boolean = false) - extends TypeCastColumn[Byte](tableColumn) + case class UInt8( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Byte] = None, + orNull: Boolean = false + ) extends TypeCastColumn[Byte](tableColumn) with Reinterpretable - case class UInt16(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Short] = None, - orNull: Boolean = false) - extends TypeCastColumn[Short](tableColumn) + case class UInt16( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Short] = None, + orNull: Boolean = false + ) extends TypeCastColumn[Short](tableColumn) with Reinterpretable - case class UInt32(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Int] = None, - orNull: Boolean = false) - extends TypeCastColumn[Int](tableColumn) + case class UInt32( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Int] = None, + orNull: Boolean = false + ) extends TypeCastColumn[Int](tableColumn) with Reinterpretable - case class UInt64(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Long] = None, - orNull: Boolean = false) - extends TypeCastColumn[Long](tableColumn) + case class UInt64( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Long] = None, + orNull: Boolean = false + ) extends TypeCastColumn[Long](tableColumn) with Reinterpretable - case class Int8(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Byte] = None, - orNull: Boolean = false) - extends TypeCastColumn[Byte](tableColumn) + case class Int8( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Byte] = None, + orNull: Boolean = false + ) extends TypeCastColumn[Byte](tableColumn) with Reinterpretable - case class Int16(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Short] = None, - orNull: Boolean = false) - extends TypeCastColumn[Short](tableColumn) + case class Int16( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Short] = None, + orNull: Boolean = false + ) extends TypeCastColumn[Short](tableColumn) with Reinterpretable - case class Int32(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Int] = None, - orNull: Boolean = false) - extends TypeCastColumn[Int](tableColumn) + case class Int32( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Int] = None, + orNull: Boolean = false + ) extends TypeCastColumn[Int](tableColumn) with Reinterpretable - case class Int64(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Long] = None, - orNull: Boolean = false) - extends TypeCastColumn[Long](tableColumn) + case class Int64( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Long] = None, + orNull: Boolean = false + ) extends TypeCastColumn[Long](tableColumn) with Reinterpretable - case class Float32(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Float] = None, - orNull: Boolean = false) - extends TypeCastColumn[Float](tableColumn) + case class Float32( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Float] = None, + orNull: Boolean = false + ) extends 
TypeCastColumn[Float](tableColumn) with Reinterpretable - case class Float64(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Double] = None, - orNull: Boolean = false) - extends TypeCastColumn[Double](tableColumn) + case class Float64( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Double] = None, + orNull: Boolean = false + ) extends TypeCastColumn[Double](tableColumn) with Reinterpretable - case class Uuid(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[Uuid] = None, - orNull: Boolean = false) - extends TypeCastColumn[java.util.UUID](tableColumn) + case class Uuid( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[Uuid] = None, + orNull: Boolean = false + ) extends TypeCastColumn[java.util.UUID](tableColumn) with Reinterpretable - case class DateRep(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[DateTime] = None, - orNull: Boolean = false) - extends TypeCastColumn[org.joda.time.LocalDate](tableColumn) + case class DateRep( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[DateTime] = None, + orNull: Boolean = false + ) extends TypeCastColumn[org.joda.time.LocalDate](tableColumn) with Reinterpretable - case class DateTimeRep(tableColumn: ConstOrColMagnet[_], - orZero: Boolean = false, - orDefault: Option[org.joda.time.DateTime] = None, - orNull: Boolean = false) - extends TypeCastColumn[org.joda.time.DateTime](tableColumn) + case class DateTimeRep( + tableColumn: ConstOrColMagnet[_], + orZero: Boolean = false, + orDefault: Option[org.joda.time.DateTime] = None, + orNull: Boolean = false + ) extends TypeCastColumn[org.joda.time.DateTime](tableColumn) with Reinterpretable case class StringRep(tableColumn: ConstOrColMagnet[_]) - extends TypeCastColumn[String](tableColumn) + extends TypeCastColumn[String](tableColumn) with Reinterpretable sealed trait CastOutBind[I, O] @@ -273,7 +286,7 @@ trait TypeCastFunctions { def reinterpret[V](typeCastColumn: TypeCastColumn[_] with Reinterpretable): Reinterpret[V] = Reinterpret[V](typeCastColumn) - def cast[T <: SimpleColumnType, O](tableColumn: ConstOrColMagnet[_], simpleColumnType: T)( - implicit castOut: CastOutBind[T, O] + def cast[T <: SimpleColumnType, O](tableColumn: ConstOrColMagnet[_], simpleColumnType: T)(implicit + castOut: CastOutBind[T, O] ): Cast[O] = Cast[O](tableColumn, simpleColumnType) } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/URLFunctions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/URLFunctions.scala index afc4e945..8bb46b0a 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/URLFunctions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/column/URLFunctions.scala @@ -4,59 +4,57 @@ import com.crobox.clickhouse.dsl.ExpressionColumn trait URLFunctions { self: Magnets => sealed abstract class URLFunction[V](val urlColumn: StringColMagnet[_]) extends ExpressionColumn[V](urlColumn.column) - abstract class URLStrFunction(col: StringColMagnet[_]) extends URLFunction[String](col) - abstract class URLArrFunction(col: StringColMagnet[_]) extends URLFunction[Seq[String]](col) + abstract class URLStrFunction(col: StringColMagnet[_]) extends URLFunction[String](col) + abstract class URLArrFunction(col: StringColMagnet[_]) extends URLFunction[Seq[String]](col) - case class Protocol(col: StringColMagnet[_]) extends URLStrFunction(col) - case class Domain(col: StringColMagnet[_]) extends 
URLStrFunction(col) - case class DomainWithoutWWW(col: StringColMagnet[_]) extends URLStrFunction(col) - case class TopLevelDomain(col: StringColMagnet[_]) extends URLStrFunction(col) - case class FirstSignificantSubdomain(col: StringColMagnet[_]) extends URLStrFunction(col) - case class CutToFirstSignificantSubdomain(col: StringColMagnet[_]) extends URLStrFunction(col) - case class Path(col: StringColMagnet[_]) extends URLStrFunction(col) - case class PathFull(col: StringColMagnet[_]) extends URLStrFunction(col) - case class QueryString(col: StringColMagnet[_]) extends URLStrFunction(col) - case class Fragment(col: StringColMagnet[_]) extends URLStrFunction(col) - case class QueryStringAndFragment(col: StringColMagnet[_]) extends URLStrFunction(col) + case class Protocol(col: StringColMagnet[_]) extends URLStrFunction(col) + case class Domain(col: StringColMagnet[_]) extends URLStrFunction(col) + case class DomainWithoutWWW(col: StringColMagnet[_]) extends URLStrFunction(col) + case class TopLevelDomain(col: StringColMagnet[_]) extends URLStrFunction(col) + case class FirstSignificantSubdomain(col: StringColMagnet[_]) extends URLStrFunction(col) + case class CutToFirstSignificantSubdomain(col: StringColMagnet[_]) extends URLStrFunction(col) + case class Path(col: StringColMagnet[_]) extends URLStrFunction(col) + case class PathFull(col: StringColMagnet[_]) extends URLStrFunction(col) + case class QueryString(col: StringColMagnet[_]) extends URLStrFunction(col) + case class Fragment(col: StringColMagnet[_]) extends URLStrFunction(col) + case class QueryStringAndFragment(col: StringColMagnet[_]) extends URLStrFunction(col) case class ExtractURLParameter(col: StringColMagnet[_], param: StringColMagnet[_]) extends URLStrFunction(col) - case class ExtractURLParameters(col: StringColMagnet[_]) extends URLArrFunction(col) - case class ExtractURLParameterNames(col: StringColMagnet[_]) extends URLArrFunction(col) - case class URLHierarchy(col: StringColMagnet[_]) extends URLArrFunction(col) - case class URLPathHierarchy(col: StringColMagnet[_]) extends URLArrFunction(col) - case class DecodeURLComponent(col: StringColMagnet[_]) extends URLStrFunction(col) - case class CutWWW(col: StringColMagnet[_]) extends URLStrFunction(col) - case class CutQueryString(col: StringColMagnet[_]) extends URLStrFunction(col) - case class CutFragment(col: StringColMagnet[_]) extends URLStrFunction(col) - case class CutQueryStringAndFragment(col: StringColMagnet[_]) extends URLStrFunction(col) + case class ExtractURLParameters(col: StringColMagnet[_]) extends URLArrFunction(col) + case class ExtractURLParameterNames(col: StringColMagnet[_]) extends URLArrFunction(col) + case class URLHierarchy(col: StringColMagnet[_]) extends URLArrFunction(col) + case class URLPathHierarchy(col: StringColMagnet[_]) extends URLArrFunction(col) + case class DecodeURLComponent(col: StringColMagnet[_]) extends URLStrFunction(col) + case class CutWWW(col: StringColMagnet[_]) extends URLStrFunction(col) + case class CutQueryString(col: StringColMagnet[_]) extends URLStrFunction(col) + case class CutFragment(col: StringColMagnet[_]) extends URLStrFunction(col) + case class CutQueryStringAndFragment(col: StringColMagnet[_]) extends URLStrFunction(col) case class CutURLParameter(col: StringColMagnet[_], parameter: StringColMagnet[_]) extends URLStrFunction(col) - def protocol(col: StringColMagnet[_]) = Protocol(col) - def domain(col: StringColMagnet[_]) = Domain(col) - def domainWithoutWWW(col: StringColMagnet[_]) = DomainWithoutWWW(col) - 
def topLevelDomain(col: StringColMagnet[_]) = TopLevelDomain(col) - def firstSignificantSubdomain(col: StringColMagnet[_]) = FirstSignificantSubdomain(col) - def cutToFirstSignificantSubdomain(col: StringColMagnet[_]) = CutToFirstSignificantSubdomain(col) - def path(col: StringColMagnet[_]) = Path(col) - def pathFull(col: StringColMagnet[_]) = PathFull(col) - def queryString(col: StringColMagnet[_]) = QueryString(col) - def fragment(col: StringColMagnet[_]) = Fragment(col) - def queryStringAndFragment(col: StringColMagnet[_]) = QueryStringAndFragment(col) + def protocol(col: StringColMagnet[_]) = Protocol(col) + def domain(col: StringColMagnet[_]) = Domain(col) + def domainWithoutWWW(col: StringColMagnet[_]) = DomainWithoutWWW(col) + def topLevelDomain(col: StringColMagnet[_]) = TopLevelDomain(col) + def firstSignificantSubdomain(col: StringColMagnet[_]) = FirstSignificantSubdomain(col) + def cutToFirstSignificantSubdomain(col: StringColMagnet[_]) = CutToFirstSignificantSubdomain(col) + def path(col: StringColMagnet[_]) = Path(col) + def pathFull(col: StringColMagnet[_]) = PathFull(col) + def queryString(col: StringColMagnet[_]) = QueryString(col) + def fragment(col: StringColMagnet[_]) = Fragment(col) + def queryStringAndFragment(col: StringColMagnet[_]) = QueryStringAndFragment(col) def extractURLParameter(col: StringColMagnet[_], param: StringColMagnet[_]) = ExtractURLParameter(col, param) - def extractURLParameters(col: StringColMagnet[_]) = ExtractURLParameters(col) - def extractURLParameterNames(col: StringColMagnet[_]) = ExtractURLParameterNames(col) - def uRLHierarchy(col: StringColMagnet[_]) = URLHierarchy(col) - def uRLPathHierarchy(col: StringColMagnet[_]) = URLPathHierarchy(col) - def decodeURLComponent(col: StringColMagnet[_]) = DecodeURLComponent(col) + def extractURLParameters(col: StringColMagnet[_]) = ExtractURLParameters(col) + def extractURLParameterNames(col: StringColMagnet[_]) = ExtractURLParameterNames(col) + def uRLHierarchy(col: StringColMagnet[_]) = URLHierarchy(col) + def uRLPathHierarchy(col: StringColMagnet[_]) = URLPathHierarchy(col) + def decodeURLComponent(col: StringColMagnet[_]) = DecodeURLComponent(col) - def cutWWW(col: StringColMagnet[_]) = CutWWW(col) - def cutQueryString(col: StringColMagnet[_]) = CutQueryString(col) - def cutFragment(col: StringColMagnet[_]) = CutFragment(col) - def cutQueryStringAndFragment(col: StringColMagnet[_]) = CutQueryStringAndFragment(col) + def cutWWW(col: StringColMagnet[_]) = CutWWW(col) + def cutQueryString(col: StringColMagnet[_]) = CutQueryString(col) + def cutFragment(col: StringColMagnet[_]) = CutFragment(col) + def cutQueryStringAndFragment(col: StringColMagnet[_]) = CutQueryStringAndFragment(col) def cutURLParameter(col: StringColMagnet[_], parameter: StringColMagnet[_]) = CutURLParameter(col, parameter) - - -/* + /* Functions that extract part of a URL protocol domain @@ -82,5 +80,5 @@ cutQueryString cutFragment cutQueryStringAndFragment cutURLParameter - */ + */ }
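Each helper above tokenizes to the identically named ClickHouse URL function. As a minimal sketch of how this part of the DSL reads at a call site (the visits table, its url column, and the implicit magnet conversions from plain strings are assumptions for illustration, not code from this diff):

import com.crobox.clickhouse.dsl._

// Hypothetical schema: a single String column holding raw URLs.
val url = NativeColumn[String]("url")

case object VisitsTable extends Table {
  override lazy val database: String          = "default"
  override val name: String                   = "visits"
  override val columns: List[NativeColumn[_]] = List(url)
}

// Expected to tokenize roughly to:
//   SELECT domain(url), pathFull(url), extractURLParameter(url, 'q') FROM default.visits
val query: OperationalQuery =
  select(domain(url), pathFull(url), extractURLParameter(url, "q")) from VisitsTable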
diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/execution/ClickhouseQueryExecutor.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/execution/ClickhouseQueryExecutor.scala index f5aabae9..b503259e 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/execution/ClickhouseQueryExecutor.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/execution/ClickhouseQueryExecutor.scala @@ -52,9 +52,7 @@ trait ClickhouseQueryExecutor extends QueryExecutor { )(implicit executionContext: ExecutionContext, settings: QuerySettings = QuerySettings()): Future[String] = Future { values.map(_.toJson.compactPrint).mkString("\n") + "\n" - }.flatMap( - entity => client.execute(s"INSERT INTO ${table.quoted} FORMAT JSONEachRow", entity)(settings) - ) + }.flatMap(entity => client.execute(s"INSERT INTO ${table.quoted} FORMAT JSONEachRow", entity)(settings)) } object ClickhouseQueryExecutor { diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/execution/QueryExecutor.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/execution/QueryExecutor.scala index b5dfccc7..f9e06d86 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/execution/QueryExecutor.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/execution/QueryExecutor.scala @@ -15,13 +15,17 @@ trait QueryExecutor { self: TokenizerModule => def serverVersion: ClickhouseServerVersion - def query[V: JsonReader](sql: String)(implicit executionContext: ExecutionContext, - settings: QuerySettings = QuerySettings()): Future[QueryResult[V]] - - def execute[V: JsonReader](query: Query)(implicit executionContext: ExecutionContext, - settings: QuerySettings = QuerySettings()): Future[QueryResult[V]] - - def insert[V: JsonWriter](table: Table, values: Seq[V])(implicit executionContext: ExecutionContext, - settings: QuerySettings = QuerySettings()): Future[String] + def query[V: JsonReader]( + sql: String + )(implicit executionContext: ExecutionContext, settings: QuerySettings = QuerySettings()): Future[QueryResult[V]] + + def execute[V: JsonReader]( + query: Query + )(implicit executionContext: ExecutionContext, settings: QuerySettings = QuerySettings()): Future[QueryResult[V]] + + def insert[V: JsonWriter](table: Table, values: Seq[V])(implicit + executionContext: ExecutionContext, + settings: QuerySettings = QuerySettings() + ): Future[String] } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ArithmeticFunctionTokenizer.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ArithmeticFunctionTokenizer.scala index eb7cd04f..3f1d69b3 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ArithmeticFunctionTokenizer.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ArithmeticFunctionTokenizer.scala @@ -31,8 +31,9 @@ trait ArithmeticFunctionTokenizer { this: ClickhouseTokenizerModule => case s: Plus[_] => tokenizeWithOperator(s, "+") } - private def tokenizeWithOperator(col: ArithmeticFunctionOp[_], - operator: String)(implicit ctx: TokenizeContext): String = + private def tokenizeWithOperator(col: ArithmeticFunctionOp[_], operator: String)(implicit + ctx: TokenizeContext + ): String = tokenizeColumn(col.left.column) + " " + operator + " " + tokenizeColumn(col.right.column) private def tokenizeAsFunction(col: ArithmeticFunctionOp[_], fn: String)(implicit ctx: TokenizeContext): String = diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ArrayFunctionTokenizer.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ArrayFunctionTokenizer.scala index 20ba7ecb..d89eb877 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ArrayFunctionTokenizer.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ArrayFunctionTokenizer.scala @@ -13,7 +13,7 @@ trait ArrayFunctionTokenizer { this: ClickhouseTokenizerModule => case EmptyArrayToSingle(col: ArrayColMagnet[_]) => s"emptyArrayToSingle(${tokenizeColumn(col.column)})" case Array(columns @ _*) => - s"[${tokenizeSeqCol(columns.map(_.column): _*)}]" //Array Creation Operator + s"[${tokenizeSeqCol(columns.map(_.column): _*)}]" // Array
Creation Operator case ArrayConcat(col1: ArrayColMagnet[_], columns @ _*) => s"arrayConcat(${tokenizeSeqCol(col1.column, columns.map(_.column): _*)})" case ArrayElement(col: ArrayColMagnet[_], n: NumericCol[_]) => diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/DictionaryFunctionTokenizer.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/DictionaryFunctionTokenizer.scala index 65a3b579..28c0cce3 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/DictionaryFunctionTokenizer.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/DictionaryFunctionTokenizer.scala @@ -5,8 +5,9 @@ import com.crobox.clickhouse.dsl._ trait DictionaryFunctionTokenizer { self: ClickhouseTokenizerModule => - private def tokenizeDictionaryGet(col: DictionaryGetFuncColumn[_], - typeName: String)(implicit ctx: TokenizeContext): String = { + private def tokenizeDictionaryGet(col: DictionaryGetFuncColumn[_], typeName: String)(implicit + ctx: TokenizeContext + ): String = { val default = col.default .map(col => ctx.delimiter + tokenizeColumn(col.column)) .getOrElse("") @@ -17,20 +18,20 @@ trait DictionaryFunctionTokenizer { } def tokenizeDictionaryFunction(col: DictionaryFuncColumn[_])(implicit ctx: TokenizeContext): String = col match { - case col: DictGetUInt8 => tokenizeDictionaryGet(col, "UInt8") - case col: DictGetUInt16 => tokenizeDictionaryGet(col, "UInt16") - case col: DictGetUInt32 => tokenizeDictionaryGet(col, "UInt32") - case col: DictGetUInt64 => tokenizeDictionaryGet(col, "UInt64") - case col: DictGetInt8 => tokenizeDictionaryGet(col, "Int8") - case col: DictGetInt16 => tokenizeDictionaryGet(col, "Int16") - case col: DictGetInt32 => tokenizeDictionaryGet(col, "Int32") - case col: DictGetInt64 => tokenizeDictionaryGet(col, "Int64") - case col: DictGetFloat32 => tokenizeDictionaryGet(col, "Float32") - case col: DictGetFloat64 => tokenizeDictionaryGet(col, "Float64") - case col: DictGetDate => tokenizeDictionaryGet(col, "Date") + case col: DictGetUInt8 => tokenizeDictionaryGet(col, "UInt8") + case col: DictGetUInt16 => tokenizeDictionaryGet(col, "UInt16") + case col: DictGetUInt32 => tokenizeDictionaryGet(col, "UInt32") + case col: DictGetUInt64 => tokenizeDictionaryGet(col, "UInt64") + case col: DictGetInt8 => tokenizeDictionaryGet(col, "Int8") + case col: DictGetInt16 => tokenizeDictionaryGet(col, "Int16") + case col: DictGetInt32 => tokenizeDictionaryGet(col, "Int32") + case col: DictGetInt64 => tokenizeDictionaryGet(col, "Int64") + case col: DictGetFloat32 => tokenizeDictionaryGet(col, "Float32") + case col: DictGetFloat64 => tokenizeDictionaryGet(col, "Float64") + case col: DictGetDate => tokenizeDictionaryGet(col, "Date") case col: DictGetDateTime => tokenizeDictionaryGet(col, "DateTime") - case col: DictGetUUID => tokenizeDictionaryGet(col, "UUID") - case col: DictGetString => tokenizeDictionaryGet(col, "String") + case col: DictGetUUID => tokenizeDictionaryGet(col, "UUID") + case col: DictGetString => tokenizeDictionaryGet(col, "String") case DictIsIn(dictName: StringColMagnet[_], childId: ConstOrColMagnet[_], ancestorId: ConstOrColMagnet[_]) => s"dictIsIn(${tokenizeColumn(dictName.column)},${tokenizeColumn(childId.column)},${tokenizeColumn(ancestorId.column)})" case DictGetHierarchy(dictName: StringColMagnet[_], id: ConstOrColMagnet[_]) => diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/InFunctionTokenizer.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/InFunctionTokenizer.scala index e1caa272..f2005065 100644 --- 
a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/InFunctionTokenizer.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/InFunctionTokenizer.scala @@ -23,8 +23,9 @@ trait InFunctionTokenizer { s"${tokenizeColumn(l.column)} GLOBAL NOT IN ${tokenizeInFunRHCol(r, () => ctx)}" } - private def tokenizeInFunRHCol(value: InFuncRHMagnet, - fn: () => TokenizeContext)(implicit ctx: TokenizeContext): String = + private def tokenizeInFunRHCol(value: InFuncRHMagnet, fn: () => TokenizeContext)(implicit + ctx: TokenizeContext + ): String = value match { case col: InFuncRHMagnet if col.query.isDefined => s"(${toRawSql(col.query.get.internalQuery)(fn())})" diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/LogicalFunctionTokenizer.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/LogicalFunctionTokenizer.scala index 28a265bf..5a2783fb 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/LogicalFunctionTokenizer.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/LogicalFunctionTokenizer.scala @@ -14,31 +14,31 @@ trait LogicalFunctionTokenizer { col.operator match { case And => (surroundWithBrackets(left, col.operator), surroundWithBrackets(right, col.operator)) match { - case ("1", "1") => "1" // LEFT & RIGHT are true, AND succeeds + case ("1", "1") => "1" // LEFT & RIGHT are true, AND succeeds case ("1", rightClause) => rightClause // LEFT is true, only tokenize RIGHT - case ("0", _) => "0" // LEFT is false, AND fails - case (leftClause, "1") => leftClause // RIGHT is true, only tokenize LEFT - case (_, "0") => "0" // RIGHT is false, AND fails + case ("0", _) => "0" // LEFT is false, AND fails + case (leftClause, "1") => leftClause // RIGHT is true, only tokenize LEFT + case (_, "0") => "0" // RIGHT is false, AND fails case (leftClause, rightClause) => s"$leftClause AND $rightClause" } case Or => (surroundWithBrackets(left, col.operator), surroundWithBrackets(right, col.operator)) match { - case ("0", "0") => "0" // LEFT & RIGHT are false, OR fails + case ("0", "0") => "0" // LEFT & RIGHT are false, OR fails case ("0", rightClause) => rightClause // LEFT is false, only tokenize RIGHT - case ("1", _) => "1" // LEFT is true, OR succeeds - case (leftClause, "0") => leftClause // RIGHT is false, only tokenize LEFT - case (_, "1") => "1" // RIGHT is true, OR succeeds + case ("1", _) => "1" // LEFT is true, OR succeeds + case (leftClause, "0") => leftClause // RIGHT is false, only tokenize LEFT + case (_, "1") => "1" // RIGHT is true, OR succeeds case (leftClause, rightClause) => s"$leftClause OR $rightClause" } case Xor => (surroundWithBrackets(left, col.operator), surroundWithBrackets(right, col.operator)) match { - case ("0", "0") => "0" // LEFT & RIGHT are false, XOR fails - case ("1", "1") => "0" // LEFT & RIGHT are true, XOR fails - case ("0", rightClause) => rightClause // LEFT is false, only tokenize RIGHT - case (leftClause, "0") => leftClause // RIGHT is false, only tokenize LEFT + case ("0", "0") => "0" // LEFT & RIGHT are false, XOR fails + case ("1", "1") => "0" // LEFT & RIGHT are true, XOR fails + case ("0", rightClause) => rightClause // LEFT is false, only tokenize RIGHT + case (leftClause, "0") => leftClause // RIGHT is false, only tokenize LEFT case ("1", rightClause) => s"not($rightClause)" // LEFT is true, RIGHT MUST BE FALSE - case (leftClause, "1") => s"not($leftClause)" // RIGHT is true, LEFT MUST BE FALSE + case (leftClause, "1") => s"not($leftClause)" // RIGHT is true, LEFT MUST BE FALSE case (leftClause, 
rightClause) => s"xor($leftClause, $rightClause)" } @@ -46,8 +46,8 @@ trait LogicalFunctionTokenizer { } } - private def surroundWithBrackets(col: TableColumn[Boolean], operator: LogicalOperator)( - implicit ctx: TokenizeContext + private def surroundWithBrackets(col: TableColumn[Boolean], operator: LogicalOperator)(implicit + ctx: TokenizeContext ): String = col match { case c: LogicalFunction if c.operator == And && operator == Or => surroundWithBrackets(tokenizeColumn(col)) diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/RandomFunctionTokenizer.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/RandomFunctionTokenizer.scala index c3bd1cbb..687ff23d 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/RandomFunctionTokenizer.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/RandomFunctionTokenizer.scala @@ -6,7 +6,7 @@ trait RandomFunctionTokenizer { self: ClickhouseTokenizerModule => def tokenizeRandomFunction(col: RandomFunction): String = col match { - case Rand() => "rand()" + case Rand() => "rand()" case Rand64() => "rand64()" } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/TypeCastFunctionTokenizer.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/TypeCastFunctionTokenizer.scala index b028b534..2ee37174 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/TypeCastFunctionTokenizer.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/TypeCastFunctionTokenizer.scala @@ -8,11 +8,13 @@ trait TypeCastFunctionTokenizer { self: ClickhouseTokenizerModule => protected def tokenizeTypeCastColumn(col: TypeCastColumn[_])(implicit ctx: TokenizeContext): String = { - def tknz[T](column: TableColumn[T], - valueType: SimpleColumnType, - orZero: Boolean, - orNull: Boolean, - defaultValue: Option[T]): String = { + def tknz[T]( + column: TableColumn[T], + valueType: SimpleColumnType, + orZero: Boolean, + orNull: Boolean, + defaultValue: Option[T] + ): String = { val postfix = if (orNull) "OrNull" else if (orZero) "OrZero" else "" val value = defaultValue match { case Some(value) => @@ -24,20 +26,20 @@ trait TypeCastFunctionTokenizer { } col match { - case c: UInt8 => tknz(c.tableColumn.column, ColumnType.UInt8, c.orZero, c.orNull, c.orDefault) - case c: UInt16 => tknz(c.tableColumn.column, ColumnType.UInt16, c.orZero, c.orNull, c.orDefault) - case c: UInt32 => tknz(c.tableColumn.column, ColumnType.UInt32, c.orZero, c.orNull, c.orDefault) - case c: UInt64 => tknz(c.tableColumn.column, ColumnType.UInt64, c.orZero, c.orNull, c.orDefault) - case c: Int8 => tknz(c.tableColumn.column, ColumnType.Int8, c.orZero, c.orNull, c.orDefault) - case c: Int16 => tknz(c.tableColumn.column, ColumnType.Int16, c.orZero, c.orNull, c.orDefault) - case c: Int32 => tknz(c.tableColumn.column, ColumnType.Int32, c.orZero, c.orNull, c.orDefault) - case c: Int64 => tknz(c.tableColumn.column, ColumnType.Int64, c.orZero, c.orNull, c.orDefault) - case c: Float32 => tknz(c.tableColumn.column, ColumnType.Float32, c.orZero, c.orNull, c.orDefault) - case c: Float64 => tknz(c.tableColumn.column, ColumnType.Float64, c.orZero, c.orNull, c.orDefault) - case c: DateRep => tknz(c.tableColumn.column, ColumnType.Date, c.orZero, c.orNull, c.orDefault) - case c: DateTimeRep => tknz(c.tableColumn.column, ColumnType.DateTime, c.orZero, c.orNull, c.orDefault) - case c: Uuid => tknz(c.tableColumn.column, ColumnType.UUID, c.orZero, c.orNull, c.orDefault) - case StringRep(tableColumn) => 
s"toString(${tokenizeColumn(tableColumn.column)})" + case c: UInt8 => tknz(c.tableColumn.column, ColumnType.UInt8, c.orZero, c.orNull, c.orDefault) + case c: UInt16 => tknz(c.tableColumn.column, ColumnType.UInt16, c.orZero, c.orNull, c.orDefault) + case c: UInt32 => tknz(c.tableColumn.column, ColumnType.UInt32, c.orZero, c.orNull, c.orDefault) + case c: UInt64 => tknz(c.tableColumn.column, ColumnType.UInt64, c.orZero, c.orNull, c.orDefault) + case c: Int8 => tknz(c.tableColumn.column, ColumnType.Int8, c.orZero, c.orNull, c.orDefault) + case c: Int16 => tknz(c.tableColumn.column, ColumnType.Int16, c.orZero, c.orNull, c.orDefault) + case c: Int32 => tknz(c.tableColumn.column, ColumnType.Int32, c.orZero, c.orNull, c.orDefault) + case c: Int64 => tknz(c.tableColumn.column, ColumnType.Int64, c.orZero, c.orNull, c.orDefault) + case c: Float32 => tknz(c.tableColumn.column, ColumnType.Float32, c.orZero, c.orNull, c.orDefault) + case c: Float64 => tknz(c.tableColumn.column, ColumnType.Float64, c.orZero, c.orNull, c.orDefault) + case c: DateRep => tknz(c.tableColumn.column, ColumnType.Date, c.orZero, c.orNull, c.orDefault) + case c: DateTimeRep => tknz(c.tableColumn.column, ColumnType.DateTime, c.orZero, c.orNull, c.orDefault) + case c: Uuid => tknz(c.tableColumn.column, ColumnType.UUID, c.orZero, c.orNull, c.orDefault) + case StringRep(tableColumn) => s"toString(${tokenizeColumn(tableColumn.column)})" case FixedString(tableColumn, n) => s"toFixedString(${tokenizeColumn(tableColumn.column)},$n)" case StringCutToZero(tableColumn) => s"toStringCutToZero(${tokenizeColumn(tableColumn.column)})" case Reinterpret(typeCastColumn) => s"reinterpretAs${tokenizeTypeCastColumn(typeCastColumn).substring(2)}" diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/marshalling/ClickhouseJsonSupport.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/marshalling/ClickhouseJsonSupport.scala index fb282457..1e0a58ac 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/marshalling/ClickhouseJsonSupport.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/marshalling/ClickhouseJsonSupport.scala @@ -3,7 +3,7 @@ package com.crobox.clickhouse.dsl.marshalling import com.crobox.clickhouse.time.IntervalStart import org.joda.time.format.{DateTimeFormatter, DateTimeFormatterBuilder, ISODateTimeFormat} import org.joda.time.{DateTime, DateTimeZone} -import spray.json.{JsNumber, JsString, JsValue, JsonFormat, deserializationError, _} +import spray.json.{deserializationError, JsNumber, JsString, JsValue, JsonFormat, _} import scala.util.Try import scala.util.matching.Regex @@ -29,8 +29,10 @@ trait ClickhouseJsonSupport { private val isoFormatter: DateTimeFormatter = ISODateTimeFormat.dateTimeNoMillis val readFormatter: DateTimeFormatter = new DateTimeFormatterBuilder() - .append(isoFormatter.getPrinter, - Array(isoFormatter.getParser, ISODateTimeFormat.date().withZone(DateTimeZone.UTC).getParser)) + .append( + isoFormatter.getPrinter, + Array(isoFormatter.getParser, ISODateTimeFormat.date().withZone(DateTimeZone.UTC).getParser) + ) .toFormatter .withOffsetParsed() @@ -48,7 +50,7 @@ trait ClickhouseJsonSupport { .plusMonths(relativeMonth.toInt - RelativeMonthsSinceUnixStart) .withZone(DateTimeZone.UTC) case date(dateOnly, timezoneId) => - //should handle quarter and year grouping as it returns a date + // should handle quarter and year grouping as it returns a date formatter .parseDateTime(dateOnly) .withZoneRetainFields(DateTimeZone.forID(timezoneId)) @@ -65,9 +67,9 @@ trait ClickhouseJsonSupport { // continue with 
parsing using the formatter dateTime.getOrElse { - try { + try formatter.parseDateTime(value) - } catch { + catch { case _: IllegalArgumentException => error(s"Couldn't parse $value into valid date time") case _: UnsupportedOperationException => error("Unsupported operation, programmatic misconfiguration?") diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/DSLImprovements.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/DSLImprovements.scala index 32727302..b76fdbbf 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/DSLImprovements.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/DSLImprovements.scala @@ -49,8 +49,8 @@ object DSLImprovements { def replaceColumn(name: String, column: Column): Seq[Column] = values.indexWhere(_.name == name) match { - case -1 => values ++ Seq(column) - case 0 => Seq(column) ++ values.slice(1, values.size) + case -1 => values ++ Seq(column) + case 0 => Seq(column) ++ values.slice(1, values.size) case idx => values.slice(0, idx) ++ Seq(column) ++ values.slice(idx + 1, values.size) } } @@ -108,7 +108,7 @@ object DSLImprovements { } def selectFromTable[T <: Table](): Option[T] = query.internalQuery.from.flatMap { - case _: InnerFromQuery => None + case _: InnerFromQuery => None case x: TableFromQuery[_] => Option(x.table.asInstanceOf[T]) } @@ -128,7 +128,7 @@ object DSLImprovements { // case _ => // } case _: TableFromQuery[_] => return query.andConstraint(condition) - case _ => + case _ => } query } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/DateConditions.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/DateConditions.scala index 3cb7d8ed..e18194c2 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/DateConditions.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/DateConditions.scala @@ -15,49 +15,52 @@ import scala.language.implicitConversions trait DateConditions { /** - * 'smart' filter function that only optionally - * selects the timestampColumn if the startDate or endDate is not a 'full' day + * 'smart' filter function that only optionally selects the timestampColumn if the startDate or endDate is not a + * 'full' day */ - def dateTimeCondition(dateColumn: NativeColumn[LocalDate], - timestampColumn: NativeColumn[Long], - startDate: DateTime, - endDate: Option[DateTime]): ExpressionColumn[Boolean] = - // this + def dateTimeCondition( + dateColumn: NativeColumn[LocalDate], + timestampColumn: NativeColumn[Long], + startDate: DateTime, + endDate: Option[DateTime] + ): ExpressionColumn[Boolean] = + // this dateColumn >= startDate.withZone(DateTimeZone.UTC).toLocalDate and - noneIfStartOfDay(startDate).map(dt => timestampColumn >= dt.getMillis) and endDate.map( - ed => + noneIfStartOfDay(startDate).map(dt => timestampColumn >= dt.getMillis) and endDate.map(ed => noneIfStartOfDay(ed) // Must be smaller or equal because of current day overlap .map(dt => dateColumn <= dt.toLocalDate and timestampColumn < dt.getMillis) // Must be strictly smaller since endDate is not inclusive .getOrElse(dateColumn < ed.withZone(DateTimeZone.UTC).toLocalDate) - ) + ) /** - * 'smart' filter function that only optionally - * selects the timestampColumn if the startDate or endDate is not a 'full' day + * 'smart' filter function that only optionally selects the timestampColumn if the startDate or endDate is not a + * 'full' day */ - def dateTimeCondition(dateColumn: NativeColumn[LocalDate], - timestampColumn: NativeColumn[Long], - startDate: Option[DateTime], - endDate: Option[DateTime]): Option[ExpressionColumn[Boolean]] = { + def dateTimeCondition( + dateColumn: NativeColumn[LocalDate], + timestampColumn: NativeColumn[Long], + startDate: Option[DateTime], + endDate: Option[DateTime] + ): Option[ExpressionColumn[Boolean]] = { - val startCondition: Option[ExpressionColumn[Boolean]] = startDate.map(sd => { + val startCondition: Option[ExpressionColumn[Boolean]] = startDate.map(sd => dateColumn >= sd.withZone(DateTimeZone.UTC).toLocalDate and noneIfStartOfDay(sd).map(dt => timestampColumn >= dt.getMillis) - }) + ) - val endCondition: Option[ExpressionColumn[Boolean]] = endDate.map(ed => { + val endCondition: Option[ExpressionColumn[Boolean]] = endDate.map(ed => noneIfStartOfDay(ed) // Must be smaller or equal because of current day overlap .map(dt => dateColumn <= dt.toLocalDate and timestampColumn < dt.getMillis) // Must be strictly smaller since endDate is not inclusive .getOrElse(dateColumn < ed.withZone(DateTimeZone.UTC).toLocalDate) - }) + ) startCondition match { case Some(condition) => Option(condition and endCondition) - case None => endCondition + case None => endCondition } }
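The two "smaller" comments carry the core idea: a boundary that falls exactly on midnight UTC is answered by the cheap Date column alone, and the timestamp comparison is added only for partial days. A small sketch of a call site, assuming illustrative column names and dates and mixing the trait into an object (not code from this diff):

import com.crobox.clickhouse.dsl._
import com.crobox.clickhouse.dsl.misc.DateConditions
import com.crobox.clickhouse.dsl.schemabuilder.ColumnType
import org.joda.time.{DateTime, DateTimeZone, LocalDate}

object DateConditionSketch extends DateConditions {
  val date = NativeColumn[LocalDate]("date")
  val ts   = NativeColumn[Long]("ts", ColumnType.UInt64)

  // Start is midnight UTC (a 'full' day), so only `date >= ...` is emitted for it;
  // the end at 12:00 is a partial day, so the condition also constrains `ts`.
  val condition: ExpressionColumn[Boolean] = dateTimeCondition(
    date,
    ts,
    new DateTime(2024, 1, 1, 0, 0, DateTimeZone.UTC),
    Some(new DateTime(2024, 1, 31, 12, 0, DateTimeZone.UTC))
  )
}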
diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/QueryImprovements.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/QueryImprovements.scala index 3b27957c..c3b1a1fa 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/QueryImprovements.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/misc/QueryImprovements.scala @@ -13,14 +13,14 @@ object QueryImprovements extends LazyLogging { implicit class QueryImpr(query: Query) { - def execute[V: JsonReader]( - implicit executionContext: ExecutionContext, + def execute[V: JsonReader](implicit + executionContext: ExecutionContext, queryExecutor: QueryExecutor ): Future[QueryResult[V]] = queryExecutor.execute(query) - def executeWithLogging[V: JsonReader](debug: Boolean)( - implicit executionContext: ExecutionContext, + def executeWithLogging[V: JsonReader](debug: Boolean)(implicit + executionContext: ExecutionContext, queryExecutor: QueryExecutor ): Future[QueryResult[V]] = { if (debug) @@ -30,8 +30,8 @@ object QueryImprovements extends LazyLogging { queryExecutor.execute(query) } - def executeWithLogging[V: JsonReader](traceId: String)( - implicit executionContext: ExecutionContext, + def executeWithLogging[V: JsonReader](traceId: String)(implicit + executionContext: ExecutionContext, queryExecutor: QueryExecutor ): Future[QueryResult[V]] = { logger.info( @@ -40,21 +40,20 @@ object QueryImprovements extends LazyLogging { queryExecutor.execute(query) } - def executeWithLogging[V: JsonReader](traceId: Option[String])( - implicit executionContext: ExecutionContext, + def executeWithLogging[V: JsonReader](traceId: Option[String])(implicit + executionContext: ExecutionContext, queryExecutor: QueryExecutor ): Future[QueryResult[V]] = { - traceId.foreach( - id => - logger.info( - s"[$id] ${tokenizer.toSql(query.internalQuery)(TokenizeContext(queryExecutor.serverVersion))}" + traceId.foreach(id => + logger.info( + s"[$id] ${tokenizer.toSql(query.internalQuery)(TokenizeContext(queryExecutor.serverVersion))}" ) ) queryExecutor.execute(query) } - def executeWithLogging[V: JsonReader]( - implicit executionContext: ExecutionContext, + def executeWithLogging[V: JsonReader](implicit + executionContext: ExecutionContext, queryExecutor: QueryExecutor ): Future[QueryResult[V]] = { logger.info(
diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/package.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/package.scala index 127cb576..ba615e59 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/package.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/package.scala @@ -10,15 +10,15 @@ import scala.util.Try package object dsl extends ClickhouseColumnFunctions with QueryFactory with QueryValueFormats { - //Naive union type context bound + // Naive union type context bound trait Contra[-A] type Union[A, B] = Contra[A] <:< Contra[B] @deprecated("Please use QueryImpr") implicit class QueryExecution(query: Query) { - def execute[V: JsonReader]( - implicit executionContext: ExecutionContext, + def execute[V: JsonReader](implicit + executionContext: ExecutionContext, queryExecutor: QueryExecutor ): Future[QueryResult[V]] = queryExecutor.execute(query) } } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/parallel/package.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/parallel/package.scala index 0b1897cb..b805d0bb 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/parallel/package.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/parallel/package.scala @@ -7,7 +7,8 @@ package object parallel { implicit class ParallelizableQuery(operationalQuery: OperationalQuery) { /** - * Merging 2 queries will retaining all grouping and selection of both queries and join them using the grouped columns + * Merging 2 queries retains all grouping and selection of both queries and joins them using the grouped + * columns */ def merge(query: OperationalQuery): MergingQueries = MergingQueries(operationalQuery, query, AllLeftJoin) @@ -16,10 +17,11 @@ package object parallel { /** * Smart joins with automated grouping, sorting and then joining on the matching columns */ - case class MergingQueries(rightTableQry: OperationalQuery, - leftTableQry: OperationalQuery, - joinType: JoinQuery.JoinType = AllLeftJoin) - extends QueryFactory { + case class MergingQueries( + rightTableQry: OperationalQuery, + leftTableQry: OperationalQuery, + joinType: JoinQuery.JoinType = AllLeftJoin + ) extends QueryFactory { override def on(columns: Column*): OperationalQuery = { val rightTableQryGrouped = rightTableQry.groupBy(columns: _*).orderBy(columns: _*) @@ -34,9 +36,11 @@ package object parallel { def joinWith(joinType: JoinQuery.JoinType): MergingQueries = this.copy(joinType = joinType) - private def _on(rightTableQry: OperationalQuery, - leftTableQry: OperationalQuery, - joinKeys: Seq[Column]): OperationalQuery = { + private def _on( + rightTableQry: OperationalQuery, + leftTableQry: OperationalQuery, + joinKeys: Seq[Column] + ): OperationalQuery = { def recursiveCollectCols(qry: InternalQuery, cols: Seq[Column] = Seq.empty): Seq[Column] = { val uQry = qry @@ -60,11 +64,11 @@ package object parallel { } } - //Forcefully add the columns of the right table(s), because 'select *' on a join only returns the values of the left table in clickhouse + // Forcefully add the columns of the right table(s), because 'select *' on a join only returns the values of the left table in clickhouse val joinCols = recursiveCollectCols(rightTableQry.internalQuery) - //filter out cols that are already available trough grouping + // filter out cols that are already available through grouping .filterNot(thisCol => joinKeys.exists(_.name == thisCol.name)) - //Map to a simple column so that we just add the select to top level + // Map to a simple column so that we just add the select to top level .map(origCol => RefColumn(origCol.name)) .toList .filterNot(_.name == EmptyColumn.name)
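Concretely, a merge reads like the following sketch, which reuses the test schema defined later in this diff (TwoTestTable and ThreeTestTable both carry item_id); both sides are grouped and ordered by the merge key before being joined, with AllLeftJoin as the default:

import com.crobox.clickhouse.dsl._
import com.crobox.clickhouse.dsl.parallel._

// Two sub-queries sharing the item_id column.
val left: OperationalQuery  = select(all) from TwoTestTable
val right: OperationalQuery = select(all) from ThreeTestTable

// Roughly: join the two grouped/ordered sub-selects USING item_id.
val merged: OperationalQuery = left merge right on itemId
// A different join flavour: (left merge right joinWith JoinQuery.InnerJoin) on itemId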
diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/AlterTable.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/AlterTable.scala index 6d013799..e95b947e 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/AlterTable.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/AlterTable.scala @@ -3,7 +3,8 @@ package com.crobox.clickhouse.dsl.schemabuilder import com.crobox.clickhouse.dsl.{ClickhouseStatement, NativeColumn} /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 2-1-17 */ case class AlterTable(tableName: String, actions: Seq[ColumnOperation]) extends ClickhouseSchemaStatement { @@ -11,9 +12,11 @@ case class AlterTable(tableName: String, actions: Seq[ColumnOperation]) extends /** * Returns the query string for this statement. * - * @return String containing the Clickhouse dialect SQL statement + * @return + * String containing the Clickhouse dialect SQL statement */ - override def query: String = s"ALTER TABLE ${ClickhouseStatement.quoteIdentifier(tableName)} ${actions.mkString(", ")}" + override def query: String = + s"ALTER TABLE ${ClickhouseStatement.quoteIdentifier(tableName)} ${actions.mkString(", ")}" } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/ClickhouseSchemaStatement.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/ClickhouseSchemaStatement.scala index 0171c833..30e77e54 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/ClickhouseSchemaStatement.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/ClickhouseSchemaStatement.scala @@ -3,7 +3,8 @@ package com.crobox.clickhouse.dsl.schemabuilder import com.crobox.clickhouse.dsl.ClickhouseStatement /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 2-1-17 */ abstract class ClickhouseSchemaStatement extends ClickhouseStatement { diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/Column.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/Column.scala index 457d9686..3b2f3520 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/Column.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/Column.scala @@ -3,14 +3,15 @@ package com.crobox.clickhouse.dsl.schemabuilder import com.crobox.clickhouse.dsl.NativeColumn /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 30-12-16 */ sealed trait ColumnType object ColumnType { - //TODO infer the types based on the generic passed to the tablecolumn + // TODO infer the types based on the generic passed to the tablecolumn abstract class SimpleColumnType(value: String) extends ColumnType { override def toString: String = value @@ -62,8 +63,10 @@ object ColumnType { case object DateTime extends SimpleColumnType("DateTime") case class Array(columnType: ColumnType) extends ColumnType { - require(!columnType.isInstanceOf[Nested] && !columnType.isInstanceOf[Array], - "Only simple types are allowed in Array") + require( + !columnType.isInstanceOf[Nested] && !columnType.isInstanceOf[Array], + "Only simple types are allowed in Array" + ) override def toString: String = s"Array($columnType)" }
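The require above makes the single-level rule fail fast at construction time rather than at CREATE TABLE time. A two-line sketch, mirroring the numbers column of the test schema later in this diff:

import com.crobox.clickhouse.dsl.NativeColumn
import com.crobox.clickhouse.dsl.schemabuilder.ColumnType

// One level of a simple element type is fine and renders as "Array(UInt32)".
val numbers = NativeColumn[Seq[Int]]("numbers", ColumnType.Array(ColumnType.UInt32))

// Nesting is rejected eagerly:
// ColumnType.Array(ColumnType.Array(ColumnType.UInt32)) // throws IllegalArgumentException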
diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateDatabase.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateDatabase.scala index 1bad41f4..57a1ed5c 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateDatabase.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateDatabase.scala @@ -3,16 +3,20 @@ package com.crobox.clickhouse.dsl.schemabuilder import com.crobox.clickhouse.dsl.ClickhouseStatement /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 2-1-17 */ -case class CreateDatabase(dbName: String, ifNotExists: Boolean = false, clusterName : Option[String] = None) - extends ClickhouseSchemaStatement with DistributedDdlSupport { +case class CreateDatabase(dbName: String, ifNotExists: Boolean = false, clusterName: Option[String] = None) + extends ClickhouseSchemaStatement + with DistributedDdlSupport { /** * Returns the query string for this statement. * - * @return String containing the Clickhouse dialect SQL statement + * @return + * String containing the Clickhouse dialect SQL statement */ - override def query: String = s"CREATE DATABASE${printIfNotExists(ifNotExists)} ${ClickhouseStatement.quoteIdentifier(dbName)}${printOnCluster()}" + override def query: String = + s"CREATE DATABASE${printIfNotExists(ifNotExists)} ${ClickhouseStatement.quoteIdentifier(dbName)}${printOnCluster()}" } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateTable.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateTable.scala index 5cd051c0..f9e46115 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateTable.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateTable.scala @@ -3,21 +3,21 @@ package com.crobox.clickhouse.dsl.schemabuilder import com.crobox.clickhouse.dsl.{ClickhouseStatement, Table} /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 30-12-16 */ -case class CreateTable(table: Table, - engine: Engine, - ifNotExists: Boolean = false, - clusterName : Option[String] = None) - extends ClickhouseSchemaStatement with DistributedDdlSupport { +case class CreateTable(table: Table, engine: Engine, ifNotExists: Boolean = false, clusterName: Option[String] = None) + extends ClickhouseSchemaStatement + with DistributedDdlSupport { require(table.columns.nonEmpty, "Cannot create a table without any columns") /** * Returns the query string for this statement.
* - * @return String containing the Clickhouse dialect SQL statement + * @return + * String containing the Clickhouse dialect SQL statement */ // TODO migrate this to the tokenizer as well override def query: String = diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/DistributedDdlSupport.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/DistributedDdlSupport.scala index f018df9f..a82507f9 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/DistributedDdlSupport.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/DistributedDdlSupport.scala @@ -4,10 +4,9 @@ import com.crobox.clickhouse.dsl.ClickhouseStatement trait DistributedDdlSupport { - val clusterName : Option[String] + val clusterName: Option[String] - protected[schemabuilder] def printOnCluster() : String = { + protected[schemabuilder] def printOnCluster(): String = clusterName.map(cluster => s" ON CLUSTER ${ClickhouseStatement.quoteIdentifier(cluster)}").getOrElse("") - } } diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/Engine.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/Engine.scala index a0e8d915..e315701b 100644 --- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/Engine.scala +++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/schemabuilder/Engine.scala @@ -5,7 +5,8 @@ import com.crobox.clickhouse.dsl.{Column, NativeColumn} import org.joda.time.LocalDate /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 30-12-16 */ sealed trait Engine {} @@ -28,7 +29,7 @@ object Engine { /** * https://clickhouse.yandex/docs/en/operations/table_engines/distributed/ - * */ + */ case class DistributedEngine(cluster: String, database: String, targetTable: String, shardingKey: Option[String]) extends Engine { override def toString: String = @@ -69,75 +70,88 @@ object Engine { |${statements.mkString("\n")}""".stripMargin } - case class MergeTree(partition: Seq[String], - primaryKey: Seq[Column], - samplingExpression: Option[String] = None, - indexGranularity: Int = MergeTreeEngine.DefaultIndexGranularity, - ttl: Iterable[TTL] = Iterable.empty) - extends MergeTreeEngine("MergeTree") + case class MergeTree( + partition: Seq[String], + primaryKey: Seq[Column], + samplingExpression: Option[String] = None, + indexGranularity: Int = MergeTreeEngine.DefaultIndexGranularity, + ttl: Iterable[TTL] = Iterable.empty + ) extends MergeTreeEngine("MergeTree") object MergeTree { def apply(dateColumn: NativeColumn[LocalDate], primaryKey: Seq[Column]): MergeTree = apply(monthPartitionCompat(dateColumn), primaryKey) - def apply(dateColumn: NativeColumn[LocalDate], - primaryKey: Seq[Column], - samplingExpression: Option[String]): MergeTree = + def apply( + dateColumn: NativeColumn[LocalDate], + primaryKey: Seq[Column], + samplingExpression: Option[String] + ): MergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, samplingExpression = samplingExpression) def apply(dateColumn: NativeColumn[LocalDate], primaryKey: Seq[Column], indexGranularity: Int): MergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, indexGranularity = indexGranularity) - def apply(dateColumn: NativeColumn[LocalDate], - primaryKey: Seq[Column], - samplingExpression: Option[String], - indexGranularity: Int): MergeTree = + def apply( + dateColumn: NativeColumn[LocalDate], + primaryKey: Seq[Column], + samplingExpression: Option[String], + indexGranularity: Int + ): MergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, 
samplingExpression, indexGranularity) } - case class ReplacingMergeTree(partition: Seq[String], - primaryKey: Seq[Column], - samplingExpression: Option[String] = None, - indexGranularity: Int = MergeTreeEngine.DefaultIndexGranularity, - version: Option[Column] = None, - ttl: Iterable[TTL] = Iterable.empty) - extends MergeTreeEngine("ReplacingMergeTree" + version.map(col => s"(${col.name})").getOrElse("")) + case class ReplacingMergeTree( + partition: Seq[String], + primaryKey: Seq[Column], + samplingExpression: Option[String] = None, + indexGranularity: Int = MergeTreeEngine.DefaultIndexGranularity, + version: Option[Column] = None, + ttl: Iterable[TTL] = Iterable.empty + ) extends MergeTreeEngine("ReplacingMergeTree" + version.map(col => s"(${col.name})").getOrElse("")) object ReplacingMergeTree { def apply(dateColumn: NativeColumn[LocalDate], primaryKey: Seq[Column]): ReplacingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey) - def apply(dateColumn: NativeColumn[LocalDate], - primaryKey: Seq[Column], - samplingExpression: Option[String]): ReplacingMergeTree = + def apply( + dateColumn: NativeColumn[LocalDate], + primaryKey: Seq[Column], + samplingExpression: Option[String] + ): ReplacingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, samplingExpression = samplingExpression) def apply(dateColumn: NativeColumn[LocalDate], primaryKey: Seq[Column], indexGranularity: Int): ReplacingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, indexGranularity = indexGranularity) - def apply(dateColumn: NativeColumn[LocalDate], - primaryKey: Seq[Column], - samplingExpression: Option[String], - indexGranularity: Int): ReplacingMergeTree = + def apply( + dateColumn: NativeColumn[LocalDate], + primaryKey: Seq[Column], + samplingExpression: Option[String], + indexGranularity: Int + ): ReplacingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, samplingExpression, indexGranularity, version = None) - def apply(dateColumn: NativeColumn[LocalDate], - primaryKey: Seq[Column], - samplingExpression: Option[String], - indexGranularity: Int, - version: Option[Column]): ReplacingMergeTree = + def apply( + dateColumn: NativeColumn[LocalDate], + primaryKey: Seq[Column], + samplingExpression: Option[String], + indexGranularity: Int, + version: Option[Column] + ): ReplacingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, samplingExpression, indexGranularity, version) } - case class SummingMergeTree(partition: Seq[String], - primaryKey: Seq[Column], - summingColumns: Seq[Column] = Seq.empty, - samplingExpression: Option[String] = None, - indexGranularity: Int = MergeTreeEngine.DefaultIndexGranularity, - ttl: Iterable[TTL] = Iterable.empty) - extends MergeTreeEngine("SummingMergeTree") { + case class SummingMergeTree( + partition: Seq[String], + primaryKey: Seq[Column], + summingColumns: Seq[Column] = Seq.empty, + samplingExpression: Option[String] = None, + indexGranularity: Int = MergeTreeEngine.DefaultIndexGranularity, + ttl: Iterable[TTL] = Iterable.empty + ) extends MergeTreeEngine("SummingMergeTree") { override def toString: String = { val summingColArg = @@ -154,45 +168,56 @@ object Engine { def apply(dateColumn: NativeColumn[LocalDate], primaryKey: Seq[Column]): SummingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey) - def apply(dateColumn: NativeColumn[LocalDate], - primaryKey: Seq[Column], - summingColumns: Seq[Column]): SummingMergeTree = + def apply( + dateColumn: NativeColumn[LocalDate], + primaryKey: Seq[Column], + 
summingColumns: Seq[Column] + ): SummingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, summingColumns) - def apply(dateColumn: NativeColumn[LocalDate], - primaryKey: Seq[Column], - summingColumns: Seq[Column], - samplingExpression: Option[String], - indexGranularity: Int): SummingMergeTree = + def apply( + dateColumn: NativeColumn[LocalDate], + primaryKey: Seq[Column], + summingColumns: Seq[Column], + samplingExpression: Option[String], + indexGranularity: Int + ): SummingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, summingColumns, samplingExpression, indexGranularity) } - case class AggregatingMergeTree(partition: Seq[String], - primaryKey: Seq[Column], - samplingExpression: Option[String] = None, - indexGranularity: Int = MergeTreeEngine.DefaultIndexGranularity, - ttl: Iterable[TTL] = Iterable.empty) - extends MergeTreeEngine("AggregatingMergeTree") + case class AggregatingMergeTree( + partition: Seq[String], + primaryKey: Seq[Column], + samplingExpression: Option[String] = None, + indexGranularity: Int = MergeTreeEngine.DefaultIndexGranularity, + ttl: Iterable[TTL] = Iterable.empty + ) extends MergeTreeEngine("AggregatingMergeTree") object AggregatingMergeTree { def apply(dateColumn: NativeColumn[LocalDate], primaryKey: Seq[Column]): AggregatingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey) - def apply(dateColumn: NativeColumn[LocalDate], - primaryKey: Seq[Column], - samplingExpression: Option[String]): AggregatingMergeTree = + def apply( + dateColumn: NativeColumn[LocalDate], + primaryKey: Seq[Column], + samplingExpression: Option[String] + ): AggregatingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, samplingExpression = samplingExpression) - def apply(dateColumn: NativeColumn[LocalDate], - primaryKey: Seq[Column], - indexGranularity: Int): AggregatingMergeTree = + def apply( + dateColumn: NativeColumn[LocalDate], + primaryKey: Seq[Column], + indexGranularity: Int + ): AggregatingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, indexGranularity = indexGranularity) - def apply(dateColumn: NativeColumn[LocalDate], - primaryKey: Seq[Column], - samplingExpression: Option[String], - indexGranularity: Int): AggregatingMergeTree = + def apply( + dateColumn: NativeColumn[LocalDate], + primaryKey: Seq[Column], + samplingExpression: Option[String], + indexGranularity: Int + ): AggregatingMergeTree = apply(monthPartitionCompat(dateColumn), primaryKey, samplingExpression, indexGranularity) } diff --git a/dsl/src/test/scala/com/crobox/clickhouse/DslTestSpec.scala b/dsl/src/test/scala/com/crobox/clickhouse/DslTestSpec.scala index 435e2108..b879e46c 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/DslTestSpec.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/DslTestSpec.scala @@ -43,7 +43,6 @@ trait DslTestSpec } else sql.substring(0, sql.indexOf(" FORMAT")).trim } - def shouldMatch(query: OperationalQuery, expected: String): Assertion = { + def shouldMatch(query: OperationalQuery, expected: String): Assertion = toSql(query.internalQuery, None) should matchSQL(expected) - } } diff --git a/dsl/src/test/scala/com/crobox/clickhouse/TestSchema.scala b/dsl/src/test/scala/com/crobox/clickhouse/TestSchema.scala index c4dccc68..8e2e04a4 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/TestSchema.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/TestSchema.scala @@ -23,9 +23,9 @@ trait TestSchema { def read(json: JsValue): UUID = json match { case JsString(s) => - try { + try UUID.fromString(s) - } catch { + 
catch { case t: Throwable => deserializationError(s"Invalid UUID '$s' found", t) } case _ => deserializationError("String expected") @@ -33,48 +33,52 @@ trait TestSchema { } case object OneTestTable extends Table { - override lazy val database: String = _db - override val name: String = "captainAmerica" + override lazy val database: String = _db + override val name: String = "captainAmerica" override val columns: List[NativeColumn[_]] = List(shieldId, timestampColumn, numbers) } case object TwoTestTable extends Table { - override lazy val database: String = _db - override val name: String = "twoTestTable" + override lazy val database: String = _db + override val name: String = "twoTestTable" override val columns: List[NativeColumn[_]] = List(itemId, col1, col2, col3, col4, nativeUUID) } case object ThreeTestTable extends Table { - override lazy val database: String = _db - override val name: String = "threeTestTable" + override lazy val database: String = _db + override val name: String = "threeTestTable" override val columns: List[NativeColumn[_]] = List(itemId, col2, col4, col5, col6) } - val shieldId = NativeColumn[String]("shield_id") - val itemId = NativeColumn[String]("item_id") - val numbers = NativeColumn[Seq[Int]]("numbers", ColumnType.Array(ColumnType.UInt32)) - val col1 = NativeColumn[String]("column_1") - val col2 = NativeColumn[Int]("column_2", ColumnType.UInt32) - val col3 = NativeColumn[String]("column_3") - val col4 = NativeColumn[String]("column_4") - val col5 = NativeColumn[String]("column_5") - val col6 = NativeColumn[String]("column_6") + val shieldId = NativeColumn[String]("shield_id") + val itemId = NativeColumn[String]("item_id") + val numbers = NativeColumn[Seq[Int]]("numbers", ColumnType.Array(ColumnType.UInt32)) + val col1 = NativeColumn[String]("column_1") + val col2 = NativeColumn[Int]("column_2", ColumnType.UInt32) + val col3 = NativeColumn[String]("column_3") + val col4 = NativeColumn[String]("column_4") + val col5 = NativeColumn[String]("column_5") + val col6 = NativeColumn[String]("column_6") val timestampColumn = NativeColumn[Long]("ts", ColumnType.UInt64) - val nativeUUID = NativeColumn[UUID]("uuid", ColumnType.UUID) + val nativeUUID = NativeColumn[UUID]("uuid", ColumnType.UUID) case class Table1Entry(shieldId: UUID, date: DateTime = DateTime.now(), numbers: Seq[Int] = Seq()) - case class Table2Entry(itemId: UUID, - firstColumn: String, - secondColumn: Int, - thirdColumn: String, - forthColumn: Option[String]) - - case class Table3Entry(itemId: UUID, - secondColumn: Int, - forthColumn: Option[String], - fifthColumn: String, - sixthColumn: String) + case class Table2Entry( + itemId: UUID, + firstColumn: String, + secondColumn: Int, + thirdColumn: String, + forthColumn: Option[String] + ) + + case class Table3Entry( + itemId: UUID, + secondColumn: Int, + forthColumn: Option[String], + fifthColumn: String, + sixthColumn: String + ) implicit val entry1Format: RootJsonFormat[Table1Entry] = jsonFormat(Table1Entry.apply, "shield_id", "ts", "numbers") diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/ClickhouseStatementTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/ClickhouseStatementTest.scala index a6a0b9e6..13fc1000 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/ClickhouseStatementTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/ClickhouseStatementTest.scala @@ -3,7 +3,8 @@ package com.crobox.clickhouse.dsl import com.crobox.clickhouse.DslTestSpec /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 
11-1-17 */ class ClickhouseStatementTest extends DslTestSpec { diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/JoinQueryTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/JoinQueryTest.scala index 3138a5fd..2276005f 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/JoinQueryTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/JoinQueryTest.scala @@ -11,85 +11,85 @@ class JoinQueryTest extends DslTestSpec with TableDrivenPropertyChecks { select(itemId).from(select(itemId).from(TwoTestTable).join(JoinQuery.CrossJoin, ThreeTestTable)) toSql(query.internalQuery) should matchSQL( s"SELECT item_id FROM (SELECT item_id FROM ${TwoTestTable.quoted} AS L1 " + - s"CROSS JOIN (SELECT * FROM ${ThreeTestTable.quoted}) AS R1) FORMAT JSON" + s"CROSS JOIN (SELECT * FROM ${ThreeTestTable.quoted}) AS R1) FORMAT JSON" ) } it should s"TABLE - TABLE - using" in { val query: OperationalQuery = - select(shieldId as itemId) - .from(OneTestTable) - .where(notEmpty(itemId)) - .join(InnerJoin, TwoTestTable) using itemId + select(shieldId as itemId) + .from(OneTestTable) + .where(notEmpty(itemId)) + .join(InnerJoin, TwoTestTable) using itemId toSql(query.internalQuery) should matchSQL( s"SELECT shield_id AS item_id FROM ${OneTestTable.quoted} AS L1 " + - s"INNER JOIN (SELECT * FROM ${TwoTestTable.quoted}) AS R1 USING item_id WHERE notEmpty(item_id) FORMAT JSON" + s"INNER JOIN (SELECT * FROM ${TwoTestTable.quoted}) AS R1 USING item_id WHERE notEmpty(item_id) FORMAT JSON" ) } it should s"TABLE - QUERY - using" in { val query = - select(shieldId as itemId) - .from(OneTestTable) - .where(notEmpty(itemId)) - .join(InnerJoin, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) using itemId + select(shieldId as itemId) + .from(OneTestTable) + .where(notEmpty(itemId)) + .join(InnerJoin, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) using itemId toSql(query.internalQuery) should matchSQL( s"SELECT shield_id AS item_id FROM ${OneTestTable.quoted} AS L1 " + - s"INNER JOIN (SELECT item_id, column_2 FROM ${TwoTestTable.quoted} " + - s"WHERE notEmpty(item_id)) AS R1 USING item_id WHERE notEmpty(item_id) FORMAT JSON" + s"INNER JOIN (SELECT item_id, column_2 FROM ${TwoTestTable.quoted} " + + s"WHERE notEmpty(item_id)) AS R1 USING item_id WHERE notEmpty(item_id) FORMAT JSON" ) } it should s"QUERY - TABLE - using" in { val query = - select(dsl.all) - .from( - select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId)) - ) - .join(InnerJoin, TwoTestTable) - .where(notEmpty(itemId)) using itemId + select(dsl.all) + .from( + select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId)) + ) + .join(InnerJoin, TwoTestTable) + .where(notEmpty(itemId)) using itemId toSql(query.internalQuery) should matchSQL( s"SELECT * FROM (SELECT shield_id AS item_id FROM ${OneTestTable.quoted} " + - s"WHERE notEmpty(item_id)) AS L1 INNER JOIN (SELECT * FROM ${TwoTestTable.quoted}) AS R1 " + - s"USING item_id WHERE notEmpty(item_id) FORMAT JSON" + s"WHERE notEmpty(item_id)) AS L1 INNER JOIN (SELECT * FROM ${TwoTestTable.quoted}) AS R1 " + + s"USING item_id WHERE notEmpty(item_id) FORMAT JSON" ) } it should s"QUERY - QUERY - using" in { val query = - select(dsl.all) - .from(select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId))) - .join(InnerJoin, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) using itemId + select(dsl.all) + .from(select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId))) + .join(InnerJoin, select(itemId, 
col2).from(TwoTestTable).where(notEmpty(itemId))) using itemId toSql(query.internalQuery) should matchSQL( s"SELECT * FROM (SELECT shield_id AS item_id FROM ${OneTestTable.quoted} " + - s"WHERE notEmpty(item_id)) AS L1 INNER JOIN (SELECT item_id, column_2 FROM ${TwoTestTable.quoted} " + - s"WHERE notEmpty(item_id)) AS R1 USING item_id FORMAT JSON" + s"WHERE notEmpty(item_id)) AS L1 INNER JOIN (SELECT item_id, column_2 FROM ${TwoTestTable.quoted} " + + s"WHERE notEmpty(item_id)) AS R1 USING item_id FORMAT JSON" ) } // ON --> check prefix per ON condition it should s"QUERY - QUERY - on simple" in { val query = - select(dsl.all) - .from(select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId))) - .join(InnerJoin, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) on itemId + select(dsl.all) + .from(select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId))) + .join(InnerJoin, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) on itemId toSql(query.internalQuery) should matchSQL( s"SELECT * FROM (SELECT shield_id AS item_id FROM ${OneTestTable.quoted} " + - s"WHERE notEmpty(item_id)) AS L1 INNER JOIN (SELECT item_id, column_2 FROM ${TwoTestTable.quoted} " + - s"WHERE notEmpty(item_id)) AS R1 ON L1.item_id = R1.item_id FORMAT JSON" + s"WHERE notEmpty(item_id)) AS L1 INNER JOIN (SELECT item_id, column_2 FROM ${TwoTestTable.quoted} " + + s"WHERE notEmpty(item_id)) AS R1 ON L1.item_id = R1.item_id FORMAT JSON" ) } // ON --> check prefix per ON condition it should s"QUERY - QUERY - on complex" in { val query = - select(dsl.all) - .from(select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId))) - .join(InnerJoin, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) on ((itemId, "<=", itemId)) + select(dsl.all) + .from(select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId))) + .join(InnerJoin, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) on ((itemId, "<=", itemId)) toSql(query.internalQuery) should matchSQL( s"SELECT * FROM (SELECT shield_id AS item_id FROM ${OneTestTable.quoted} " + - s"WHERE notEmpty(item_id)) AS L1 INNER JOIN (SELECT item_id, column_2 FROM ${TwoTestTable.quoted} " + - s"WHERE notEmpty(item_id)) AS R1 ON L1.item_id <= R1.item_id FORMAT JSON" + s"WHERE notEmpty(item_id)) AS L1 INNER JOIN (SELECT item_id, column_2 FROM ${TwoTestTable.quoted} " + + s"WHERE notEmpty(item_id)) AS R1 ON L1.item_id <= R1.item_id FORMAT JSON" ) } @@ -101,10 +101,10 @@ class JoinQueryTest extends DslTestSpec with TableDrivenPropertyChecks { it should s"fail on set on and using" in { val query: OperationalQuery = - select(shieldId as itemId) - .from(OneTestTable) - .where(notEmpty(itemId)) - .join(InnerJoin, TwoTestTable) using itemId on itemId + select(shieldId as itemId) + .from(OneTestTable) + .where(notEmpty(itemId)) + .join(InnerJoin, TwoTestTable) using itemId on itemId an[AssertionError] shouldBe thrownBy(toSql(query.internalQuery)) } diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/QueryMergeTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/QueryMergeTest.scala index 0b07f1ef..9f0955f8 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/QueryMergeTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/QueryMergeTest.scala @@ -1,7 +1,7 @@ package com.crobox.clickhouse.dsl import com.crobox.clickhouse.dsl.parallel._ -import com.crobox.clickhouse.{DslTestSpec, dsl => CHDsl} +import com.crobox.clickhouse.{dsl => CHDsl, DslTestSpec} import java.util.UUID @@ -43,7 +43,7 
@@ class QueryMergeTest extends DslTestSpec { val left: OperationalQuery = select(CHDsl.all) from OneTestTable where shieldId.isEq(expectedUUID) val right: OperationalQuery = select(CHDsl.all) from TwoTestTable where (col3 isEq "wompalama") val right2: OperationalQuery = select(CHDsl.all) from ThreeTestTable where shieldId.isEq(expectedUUID) - val query = right2 merge (right) on timestampColumn merge (left) on timestampColumn + val query = right2 merge right on timestampColumn merge left on timestampColumn // PURE SPECULATIVE / SQL ONLY // THE REASON WHY IT'S NOT --> ON twoTestTable.ts is that twoTestTable DOESN'T have a ts column. diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/QueryTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/QueryTest.scala index dfd9293c..ad7ead99 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/QueryTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/QueryTest.scala @@ -21,19 +21,19 @@ class QueryTest extends DslTestSpec { val query = select(col1, shieldId).from(OneTestTable).join(InnerJoin, TwoTestTable) using shieldId toSql(query.internalQuery) should matchSQL( s"SELECT column_1, shield_id FROM $database.captainAmerica AS L1 INNER JOIN (SELECT * " + - s"FROM $database.twoTestTable) AS R1 USING shield_id FORMAT JSON" + s"FROM $database.twoTestTable) AS R1 USING shield_id FORMAT JSON" ) } it should "generate inner join" in { - val expectedUUID = UUID.randomUUID() - val innerQuery: OperationalQuery = select(shieldId as itemId) from OneTestTable where shieldId.isEq(expectedUUID) + val expectedUUID = UUID.randomUUID() + val innerQuery: OperationalQuery = select(shieldId as itemId) from OneTestTable where shieldId.isEq(expectedUUID) val joinInnerQuery: OperationalQuery = select(itemId) from TwoTestTable where (col3 isEq "wompalama") - val query = select(col1, shieldId) from innerQuery join (InnerJoin, joinInnerQuery) using itemId + val query = select(col1, shieldId) from innerQuery join (InnerJoin, joinInnerQuery) using itemId toSql(query.internalQuery) should matchSQL( s"SELECT column_1, shield_id FROM (SELECT shield_id AS item_id FROM $database.captainAmerica " + - s"WHERE shield_id = '$expectedUUID') AS L1 INNER JOIN (SELECT item_id FROM $database.twoTestTable " + - s"WHERE column_3 = 'wompalama') AS R1 USING item_id FORMAT JSON" + s"WHERE shield_id = '$expectedUUID') AS L1 INNER JOIN (SELECT item_id FROM $database.twoTestTable " + + s"WHERE column_3 = 'wompalama') AS R1 USING item_id FORMAT JSON" ) } @@ -87,8 +87,7 @@ class QueryTest extends DslTestSpec { val query = select(shieldId) from OneTestTable val query2 = select(itemId) from OneTestTable where col2 >= 2 val composed = query + query2 - composed should matchPattern { - case Failure(_: IllegalArgumentException) => + composed should matchPattern { case Failure(_: IllegalArgumentException) => } } @@ -99,9 +98,10 @@ class QueryTest extends DslTestSpec { it should "parse column function in filter" in { - val query = select(minus(NativeColumn[LocalDate]("date"), NativeColumn[Double]("double"))) from OneTestTable where (sum( - col2 - ) > 0) + val query = + select(minus(NativeColumn[LocalDate]("date"), NativeColumn[Double]("double"))) from OneTestTable where (sum( + col2 + ) > 0) toSql(query.internalQuery) should matchSQL( s"SELECT date - double FROM $database.captainAmerica WHERE sum(column_2) > 0 FORMAT JSON" ) @@ -122,16 +122,14 @@ class QueryTest extends DslTestSpec { val composed = query + query2 val composed2 = composed + query3 - composed should matchPattern { - case t: 
Success[_] => + composed should matchPattern { case t: Success[_] => } toSql(composed.get.internalQuery) should matchSQL( s"SELECT shield_id FROM $database.captainAmerica FORMAT JSON" ) - composed2 should matchPattern { - case t: Success[_] => + composed2 should matchPattern { case t: Success[_] => } toSql(composed2.get.internalQuery) should matchSQL( s"SELECT shield_id FROM $database.captainAmerica WHERE column_2 >= 4 FORMAT JSON" @@ -142,9 +140,8 @@ class QueryTest extends DslTestSpec { val query = select(shieldId) from OneTestTable val query2 = select(shieldId, itemId) from OneTestTable - an[IllegalArgumentException] should be thrownBy { - query.unionAll(query2) - } + an[IllegalArgumentException] should be thrownBy + query.unionAll(query2) } it should "perform the union of multiple tables" in { diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/column/INFunctionsTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/column/INFunctionsTest.scala index 7fbe0c7b..91ef296d 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/column/INFunctionsTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/column/INFunctionsTest.scala @@ -24,10 +24,12 @@ class INFunctionsTest extends DslTestSpec { |SELECT item_id FROM ${OneTestTable.quoted} WHERE item_id IN ('a', 'b') |""".stripMargin) - toSQL(select(itemId.in(Seq("a", "b")) as "l") - .from(OneTestTable) - .where(itemId.isEq("a")), - false) should matchSQL(s""" + toSQL( + select(itemId.in(Seq("a", "b")) as "l") + .from(OneTestTable) + .where(itemId.isEq("a")), + false + ) should matchSQL(s""" |SELECT item_id IN ('a', 'b') AS l FROM ${OneTestTable.quoted} WHERE item_id = 'a' |""".stripMargin) } @@ -58,14 +60,12 @@ class INFunctionsTest extends DslTestSpec { it should "use tableAlias for IN multiple tables" in { toSQL( - ( - select(col4) - .from(TwoTestTable) - .where( - col4.in(select(col4).from(ThreeTestTable)) and + select(col4) + .from(TwoTestTable) + .where( + col4.in(select(col4).from(ThreeTestTable)) and col2.in(select(col2).from(TwoTestTable)) and col2.in(select(col4).from(ThreeTestTable)) - ) ) ) should matchSQL( s""" diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/EmptyFunctionTokenizerTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/EmptyFunctionTokenizerTest.scala index 80536c19..f50f95bc 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/EmptyFunctionTokenizerTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/EmptyFunctionTokenizerTest.scala @@ -1,7 +1,7 @@ package com.crobox.clickhouse.dsl.language import com.crobox.clickhouse.dsl._ -import com.crobox.clickhouse.{DslTestSpec, dsl} +import com.crobox.clickhouse.{dsl, DslTestSpec} class EmptyFunctionTokenizerTest extends DslTestSpec { diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/LogicalFunctionTokenizerTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/LogicalFunctionTokenizerTest.scala index 5902e3b6..9e25a0d9 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/LogicalFunctionTokenizerTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/LogicalFunctionTokenizerTest.scala @@ -106,7 +106,9 @@ class LogicalFunctionTokenizerTest extends DslTestSpec { ) } - def conditionOr(nr: Seq[Int]): Option[TableColumn[Boolean]] = Option(nr.map(x => col2 isEq x).reduce((a, b) => (a or b))) + def conditionOr(nr: Seq[Int]): Option[TableColumn[Boolean]] = Option( + nr.map(x => col2 isEq x).reduce((a, b) => a or b) + ) def conditionAnd(nr: 
Seq[Int]): Option[TableColumn[Boolean]] = Option(nr.map(x => col2 isEq x).reduce((a, b) => a and b)) diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/TypeCaseFunctionTokenizerTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/TypeCaseFunctionTokenizerTest.scala index a7410910..55b5f966 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/TypeCaseFunctionTokenizerTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/TypeCaseFunctionTokenizerTest.scala @@ -6,7 +6,9 @@ import com.crobox.clickhouse.dsl._ class TypeCaseFunctionTokenizerTest extends DslTestSpec { it should "succeed for UUID functions" in { - toSQL(select(toUUID(const("00000000-0000-0000-0000-000000000000")))) shouldBe "SELECT toUUID('00000000-0000-0000-0000-000000000000')" + toSQL( + select(toUUID(const("00000000-0000-0000-0000-000000000000"))) + ) shouldBe "SELECT toUUID('00000000-0000-0000-0000-000000000000')" toSQL(select(toUUIDOrZero(const("123")))) shouldBe "SELECT toUUIDOrZero('123')" toSQL(select(toUUIDOrNull(const("123")))) shouldBe "SELECT toUUIDOrNull('123')" } diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/AlterTableTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/AlterTableTest.scala index 5f34b392..1f4b7b6c 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/AlterTableTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/AlterTableTest.scala @@ -5,7 +5,8 @@ import com.crobox.clickhouse.dsl.NativeColumn import com.crobox.clickhouse.dsl.schemabuilder.ColumnOperation.{AddColumn, DropColumn, ModifyColumn} /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 2-1-17 */ class AlterTableTest extends DslTestSpec { diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/ColumnTypeTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/ColumnTypeTest.scala index f5814be7..dad95741 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/ColumnTypeTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/ColumnTypeTest.scala @@ -4,7 +4,8 @@ import com.crobox.clickhouse.DslTestSpec import com.crobox.clickhouse.dsl.NativeColumn /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 30-12-16 */ class ColumnTypeTest extends DslTestSpec { @@ -15,11 +16,10 @@ class ColumnTypeTest extends DslTestSpec { ) } - it should "deny double Nesting" in { + it should "deny double Nesting" in intercept[IllegalArgumentException] { ColumnType.Nested(NativeColumn("a", ColumnType.Nested(NativeColumn("b")))) } - } it should "support multiple arguments for AggregateFunction column" in { ColumnType.AggregateFunctionColumn("uniq", ColumnType.String).toString should be( diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateDatabaseTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateDatabaseTest.scala index 75f6f59c..bf7dcb16 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateDatabaseTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateDatabaseTest.scala @@ -3,16 +3,16 @@ package com.crobox.clickhouse.dsl.schemabuilder import com.crobox.clickhouse.DslTestSpec /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 2-1-17 */ class CreateDatabaseTest extends DslTestSpec { - it should "deny creating invalid databases" in { + it should "deny creating invalid databases" in 
intercept[IllegalArgumentException]( CreateDatabase("").toString ) - } it should "create a database with invalid name" in { CreateDatabase(".Fool").toString should be("CREATE DATABASE `.Fool`") diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateTableTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateTableTest.scala index 3cebcae7..c86c6af6 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateTableTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/schemabuilder/CreateTableTest.scala @@ -7,15 +7,17 @@ import com.crobox.clickhouse.dsl.schemabuilder.Engine.{DistributedEngine, Summin import org.joda.time.LocalDate /** - * @author Sjoerd Mulder + * @author + * Sjoerd Mulder * @since 30-12-16 */ class CreateTableTest extends DslTestSpec { - case class TestTable(override val name: String, - override val columns: Seq[NativeColumn[_]], - override val database: String = "default") - extends Table + case class TestTable( + override val name: String, + override val columns: Seq[NativeColumn[_]], + override val database: String = "default" + ) extends Table it should "deny creating invalid tables and columns" in { intercept[IllegalArgumentException]( @@ -36,13 +38,16 @@ class CreateTableTest extends DslTestSpec { } it should "make add IF NOT EXISTS" in { - CreateTable(TestTable("a", - List( - NativeColumn("b", ColumnType.String) - ), - "b"), - Engine.TinyLog, - ifNotExists = true, + CreateTable( + TestTable( + "a", + List( + NativeColumn("b", ColumnType.String) + ), + "b" + ), + Engine.TinyLog, + ifNotExists = true ).toString should be("""CREATE TABLE IF NOT EXISTS b.a ( | b String |) ENGINE = TinyLog""".stripMargin) @@ -50,12 +55,16 @@ class CreateTableTest extends DslTestSpec { } it should "make add ON CLUSTER" in { - CreateTable(TestTable("a", - List( - NativeColumn("b", ColumnType.String) - )), - Engine.TinyLog, - clusterName = Some("mycluster")).toString should be("""CREATE TABLE default.a ON CLUSTER mycluster ( + CreateTable( + TestTable( + "a", + List( + NativeColumn("b", ColumnType.String) + ) + ), + Engine.TinyLog, + clusterName = Some("mycluster") + ).toString should be("""CREATE TABLE default.a ON CLUSTER mycluster ( | b String |) ENGINE = TinyLog""".stripMargin) @@ -63,11 +72,13 @@ class CreateTableTest extends DslTestSpec { it should "make a valid CREATE TABLE query" in { val result = CreateTable( - TestTable("tiny_log_table", - Seq( - NativeColumn("test_column", ColumnType.String), - NativeColumn("test_column2", ColumnType.Int8, Default("expr")) - )), + TestTable( + "tiny_log_table", + Seq( + NativeColumn("test_column", ColumnType.String), + NativeColumn("test_column2", ColumnType.Int8, Default("expr")) + ) + ), Engine.TinyLog ).toString @@ -133,9 +144,11 @@ class CreateTableTest extends DslTestSpec { testColumn2 ) ), - Engine.MergeTree(Seq(clientId.name, s"toYYYYMM(${date.name})"), - Seq(date, clientId, hitId), - Some("int64Hash(client_id)")) + Engine.MergeTree( + Seq(clientId.name, s"toYYYYMM(${date.name})"), + Seq(date, clientId, hitId), + Some("int64Hash(client_id)") + ) ).toString result should be("""CREATE TABLE default.merge_tree_table ( @@ -203,10 +216,12 @@ class CreateTableTest extends DslTestSpec { versionColumn ) ), - Engine.ReplacingMergeTree(Seq(s"toYYYYMM(${date.name})"), - Seq(date, clientId, hitId), - Some("int64Hash(client_id)"), - version = Option(versionColumn)) + Engine.ReplacingMergeTree( + Seq(s"toYYYYMM(${date.name})"), + Seq(date, clientId, hitId), + 
Some("int64Hash(client_id)"), + version = Option(versionColumn) + ) ) } @@ -322,9 +337,11 @@ class CreateTableTest extends DslTestSpec { "test_table_agg", Seq(date, clientId, uniqHits) ), - Engine.AggregatingMergeTree(Seq(s"toYYYYMM(${date.name})"), - Seq(date, clientId), - ttl = Option(TTL(date, "3 MONTH"))) + Engine.AggregatingMergeTree( + Seq(s"toYYYYMM(${date.name})"), + Seq(date, clientId), + ttl = Option(TTL(date, "3 MONTH")) + ) ) create.toString should be("""CREATE TABLE default.test_table_agg ( @@ -352,9 +369,11 @@ class CreateTableTest extends DslTestSpec { Engine.AggregatingMergeTree( Seq(s"toYYYYMM(${date.name})"), Seq(date, clientId), - ttl = Iterable(TTL(date, "1 MONTH [DELETE]"), - TTL(date, "1 WEEK TO VOLUME 'aaa'"), - TTL(date, "2 WEEK TO DISK 'bbb'")) + ttl = Iterable( + TTL(date, "1 MONTH [DELETE]"), + TTL(date, "1 WEEK TO VOLUME 'aaa'"), + TTL(date, "2 WEEK TO DISK 'bbb'") + ) ) ) diff --git a/dsl/src/test/scala/com/crobox/clickhouse/misc/DSLImprovementsTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/misc/DSLImprovementsTest.scala index 1be6383b..3481186a 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/misc/DSLImprovementsTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/misc/DSLImprovementsTest.scala @@ -78,7 +78,7 @@ class DSLImprovementsTest extends DslTestSpec { cols.size should be(1) cols should be(Seq(intCol)) - //preserve index at start + // preserve index at start cols = Seq(aliased, stringCol, stringArrayCol, intArrayCol).replaceColumn("alias", intCol) cols.size should be(4) cols should be(Seq(intCol, stringCol, stringArrayCol, intArrayCol)) diff --git a/dsl/src/test/scala/com/crobox/clickhouse/misc/DateTimeConditionTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/misc/DateTimeConditionTest.scala index 64d56748..2d1ba02b 100644 --- a/dsl/src/test/scala/com/crobox/clickhouse/misc/DateTimeConditionTest.scala +++ b/dsl/src/test/scala/com/crobox/clickhouse/misc/DateTimeConditionTest.scala @@ -12,32 +12,48 @@ class DateTimeConditionTest extends DslTestSpec with TableDrivenPropertyChecks { forAll( Table( ("startDate", "endDate", "expected"), - ("2018-01-02T00:00:00Z", - Some("2018-01-05T00:00:00Z"), - "date >= toDate('2018-01-02') AND date < toDate('2018-01-05')"), + ( + "2018-01-02T00:00:00Z", + Some("2018-01-05T00:00:00Z"), + "date >= toDate('2018-01-02') AND date < toDate('2018-01-05')" + ), ("2018-01-02T00:00:00Z", None, "date >= toDate('2018-01-02')"), - ("2018-01-02T02:00:00+02:00", - Some("2018-01-05T02:00:00+02:00"), - "date >= toDate('2018-01-02') AND date < toDate('2018-01-05')"), - ("2018-01-02T22:00:00-02:00", - Some("2018-01-05T22:00:00-02:00"), - "date >= toDate('2018-01-03') AND date < toDate('2018-01-06')"), - ("2018-01-02T22:00:00-02:00", - Some("2018-01-05T23:00:00-02:00"), - "date >= toDate('2018-01-03') AND date <= toDate('2018-01-06') AND ts < 1515200400000"), - ("2018-01-02T23:00:00-02:00", - Some("2018-01-05T22:00:00-02:00"), - "date >= toDate('2018-01-03') AND ts >= 1514941200000 AND date < toDate('2018-01-06')"), + ( + "2018-01-02T02:00:00+02:00", + Some("2018-01-05T02:00:00+02:00"), + "date >= toDate('2018-01-02') AND date < toDate('2018-01-05')" + ), + ( + "2018-01-02T22:00:00-02:00", + Some("2018-01-05T22:00:00-02:00"), + "date >= toDate('2018-01-03') AND date < toDate('2018-01-06')" + ), + ( + "2018-01-02T22:00:00-02:00", + Some("2018-01-05T23:00:00-02:00"), + "date >= toDate('2018-01-03') AND date <= toDate('2018-01-06') AND ts < 1515200400000" + ), + ( + "2018-01-02T23:00:00-02:00", + 
Some("2018-01-05T22:00:00-02:00"), + "date >= toDate('2018-01-03') AND ts >= 1514941200000 AND date < toDate('2018-01-06')" + ), ("2018-01-02T23:00:00-02:00", None, "date >= toDate('2018-01-03') AND ts >= 1514941200000"), - ("2018-01-02T01:00:00+02:00", - Some("2018-01-05T01:00:00+02:00"), - "date >= toDate('2018-01-01') AND ts >= 1514847600000 AND date <= toDate('2018-01-04') AND ts < 1515106800000"), - ("2018-01-02T00:00:00-01:00", - Some("2018-01-05T00:00:00-01:00"), - "date >= toDate('2018-01-02') AND ts >= 1514854800000 AND date <= toDate('2018-01-05') AND ts < 1515114000000"), - ("2018-01-02T23:00:00-02:00", - Some("2018-01-05T23:00:00-02:00"), - "date >= toDate('2018-01-03') AND ts >= 1514941200000 AND date <= toDate('2018-01-06') AND ts < 1515200400000"), + ( + "2018-01-02T01:00:00+02:00", + Some("2018-01-05T01:00:00+02:00"), + "date >= toDate('2018-01-01') AND ts >= 1514847600000 AND date <= toDate('2018-01-04') AND ts < 1515106800000" + ), + ( + "2018-01-02T00:00:00-01:00", + Some("2018-01-05T00:00:00-01:00"), + "date >= toDate('2018-01-02') AND ts >= 1514854800000 AND date <= toDate('2018-01-05') AND ts < 1515114000000" + ), + ( + "2018-01-02T23:00:00-02:00", + Some("2018-01-05T23:00:00-02:00"), + "date >= toDate('2018-01-03') AND ts >= 1514941200000 AND date <= toDate('2018-01-06') AND ts < 1515200400000" + ) ) ) { (startDateTime, endDateTime, expected) => it should s"resolve $startDateTime/$endDateTime to correct query" in { diff --git a/project/Config.scala b/project/Config.scala index 52dc8246..02c0fdd3 100644 --- a/project/Config.scala +++ b/project/Config.scala @@ -7,17 +7,17 @@ object Config { private lazy val testAll = TaskKey[Unit]("tests") private lazy val unitSettings = Seq( - Test / fork := true, + Test / fork := true, Test / parallelExecution := false ) private lazy val itSettings = - inConfig(CustomIntegrationTest)(Defaults.testSettings) ++ - Seq( - CustomIntegrationTest / fork := false, - CustomIntegrationTest / parallelExecution := false, - CustomIntegrationTest / scalaSource := baseDirectory.value / "src/it/scala" - ) ++ inConfig(IntegrationTest)(Defaults.testSettings) + inConfig(CustomIntegrationTest)(Defaults.testSettings) ++ + Seq( + CustomIntegrationTest / fork := false, + CustomIntegrationTest / parallelExecution := false, + CustomIntegrationTest / scalaSource := baseDirectory.value / "src/it/scala" + ) ++ inConfig(IntegrationTest)(Defaults.testSettings) lazy val testSettings = itSettings ++ unitSettings ++ Seq( testAll := (CustomIntegrationTest / test).dependsOn(Test / test).value diff --git a/project/plugins.sbt b/project/plugins.sbt index 774691cd..38ef3359 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,4 +1,4 @@ -addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.12") -addSbtPlugin("com.github.sbt" % "sbt-release" % "1.4.0") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.4") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.3.0") \ No newline at end of file +addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.12") +addSbtPlugin("com.github.sbt" % "sbt-release" % "1.4.0") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.4") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.3.0") diff --git a/testkit/src/main/scala/com/crobox/clickhouse/testkit/ClickhouseMatchers.scala b/testkit/src/main/scala/com/crobox/clickhouse/testkit/ClickhouseMatchers.scala index 7f346430..3fbeec50 100644 --- a/testkit/src/main/scala/com/crobox/clickhouse/testkit/ClickhouseMatchers.scala +++ 
b/testkit/src/main/scala/com/crobox/clickhouse/testkit/ClickhouseMatchers.scala @@ -15,9 +15,10 @@ trait ClickhouseMatchers { private def diff(s1: String, s2: String): String = s1.zip(s2).map(tuple => if (tuple._1 == tuple._2) '_' else tuple._1).mkString("") - def matchSQL(expected: String): Matcher[String] = (left: String) => MatchResult( - clean(left) == clean(expected), - s""" + def matchSQL(expected: String): Matcher[String] = (left: String) => + MatchResult( + clean(left) == clean(expected), + s""" |SQL messages don't match. |${clean(left)} |!= @@ -25,6 +26,6 @@ trait ClickhouseMatchers { | |${diff(clean(left), clean(expected))} |""".stripMargin, - "SQL messages are equal" - ) + "SQL messages are equal" + ) } diff --git a/testkit/src/main/scala/com/crobox/clickhouse/testkit/ClickhouseSpec.scala b/testkit/src/main/scala/com/crobox/clickhouse/testkit/ClickhouseSpec.scala index 35cd8a24..cb32a64c 100644 --- a/testkit/src/main/scala/com/crobox/clickhouse/testkit/ClickhouseSpec.scala +++ b/testkit/src/main/scala/com/crobox/clickhouse/testkit/ClickhouseSpec.scala @@ -33,9 +33,12 @@ trait ClickhouseSpec extends SuiteMixin with BeforeAndAfter with BeforeAndAfterA internalClient.execute(query) } Await - .result(result.recoverWith { - case e: Throwable => Future.successful(e.getMessage) - }(ExecutionContext.Implicits.global), clickhouseSpecTimeout) + .result( + result.recoverWith { case e: Throwable => + Future.successful(e.getMessage) + }(ExecutionContext.Implicits.global), + clickhouseSpecTimeout + ) .trim() } @@ -90,9 +93,9 @@ trait ClickhouseSpec extends SuiteMixin with BeforeAndAfter with BeforeAndAfterA Thread.sleep(total) } backoff = backoff + 1 - try { + try done = predicate() - } catch { + catch { case _: Throwable => } } @@ -124,10 +127,14 @@ trait ClickhouseSpec extends SuiteMixin with BeforeAndAfter with BeforeAndAfterA lazy val ClickHouseVersion: ClickhouseServerVersion = clickClient.serverVersion def assumeMinimalClickhouseVersion(version: Int): Assertion = - assume(ClickHouseVersion.minimalVersion(version), - s"ClickhouseVersion: $ClickHouseVersion >= $version does NOT hold") + assume( + ClickHouseVersion.minimalVersion(version), + s"ClickhouseVersion: $ClickHouseVersion >= $version does NOT hold" + ) def assumeMinimalClickhouseVersion(version: Int, subVersion: Int): Assertion = - assume(ClickHouseVersion.minimalVersion(version, subVersion), - s"ClickhouseVersion: $ClickHouseVersion >= $version.$subVersion does NOT hold") + assume( + ClickHouseVersion.minimalVersion(version, subVersion), + s"ClickhouseVersion: $ClickHouseVersion >= $version.$subVersion does NOT hold" + ) } diff --git a/testkit/src/test/scala/com/crobox/clickhouse/testkit/ClickhouseSpecSpec.scala b/testkit/src/test/scala/com/crobox/clickhouse/testkit/ClickhouseSpecSpec.scala index e3c334ed..bc40c7fb 100644 --- a/testkit/src/test/scala/com/crobox/clickhouse/testkit/ClickhouseSpecSpec.scala +++ b/testkit/src/test/scala/com/crobox/clickhouse/testkit/ClickhouseSpecSpec.scala @@ -5,9 +5,10 @@ import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers /** - * @author Sjoerd Mulder - * @since 22-10-18 - */ + * @author + * Sjoerd Mulder + * @since 22-10-18 + */ class ClickhouseSpecSpec extends AnyFlatSpecLike with Matchers with ClickhouseSpec { override val config: Config = ConfigFactory.load() @@ -27,9 +28,9 @@ class ClickhouseSpecSpec extends AnyFlatSpecLike with Matchers with ClickhouseSp blockUntilRowsInTable(2, table) blockUntilExactRowsInTable(3, table) - dropAllTables() should be 
(1) + dropAllTables() should be(1) - dropAllTables() should be (0) + dropAllTables() should be(0) }