[wip] feat: Add framework for supporting multiple telemetry providers #1722

Draft · wants to merge 9 commits into main
8 changes: 8 additions & 0 deletions common/src/main/scala/org/apache/comet/CometConf.scala
@@ -439,6 +439,14 @@ object CometConf extends ShimCometConf {
.booleanConf
.createWithDefault(false)

val COMET_TELEMETRY_PROVIDER: ConfigEntry[String] =
conf("spark.comet.telemetry.provider")
.doc("Experimental support for configuring an optional telemetry provider.")
.internal()
.stringConf
.checkValues(Set("otel", "chrome", "none"))
.createWithDefault("none")

val COMET_EXPLAIN_VERBOSE_ENABLED: ConfigEntry[Boolean] =
conf("spark.comet.explain.verbose.enabled")
.doc(
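A minimal usage sketch (not part of this diff), assuming a standard SparkSession setup: the config key and the allowed values ("otel", "chrome", "none") come from the COMET_TELEMETRY_PROVIDER entry above, while the app name is illustrative.

import org.apache.spark.sql.SparkSession

// Hedged sketch: select the Chrome trace provider for this session.
val spark = SparkSession
  .builder()
  .appName("comet-telemetry-example") // hypothetical app name
  .config("spark.comet.telemetry.provider", "chrome")
  .getOrCreate()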
27 changes: 27 additions & 0 deletions pom.xml
@@ -268,6 +268,33 @@ under the License.
<scope>${spark.maven.scope}</scope>
</dependency>

<!-- OpenTelemetry -->
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-api</artifactId>
<version>1.38.0</version>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-sdk</artifactId>
<version>1.38.0</version>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-sdk-extension-autoconfigure</artifactId>
<version>1.38.0</version>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-exporter-otlp</artifactId>
<version>1.38.0</version>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-semconv</artifactId>
<version>1.30.1-alpha</version>
</dependency>

<!-- Shaded deps marked as provided. These are promoted to compile scope
in the modules where we want the shaded classes to appear in the
associated jar. -->
24 changes: 24 additions & 0 deletions spark/pom.xml
@@ -165,6 +165,30 @@ under the License.
</exclusion>
</exclusions>
</dependency>

<!-- OpenTelemetry -->
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-api</artifactId>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-sdk</artifactId>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-sdk-extension-autoconfigure</artifactId>
<version>1.38.0</version>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-exporter-otlp</artifactId>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-semconv</artifactId>
</dependency>

</dependencies>

<build>
36 changes: 14 additions & 22 deletions spark/src/main/scala/org/apache/comet/CometExecIterator.scala
@@ -28,6 +28,7 @@ import org.apache.spark.sql.comet.CometMetricNode
import org.apache.spark.sql.vectorized._

import org.apache.comet.CometConf.{COMET_BATCH_SIZE, COMET_DEBUG_ENABLED, COMET_EXEC_MEMORY_POOL_TYPE, COMET_EXPLAIN_NATIVE_ENABLED, COMET_METRICS_UPDATE_INTERVAL}
import org.apache.comet.telemetry.TelemetryProvider
import org.apache.comet.vector.NativeUtil

/**
@@ -55,7 +56,8 @@ class CometExecIterator(
protobufQueryPlan: Array[Byte],
nativeMetrics: CometMetricNode,
numParts: Int,
partitionIndex: Int)
partitionIndex: Int,
telemetryProvider: TelemetryProvider)
extends Iterator[ColumnarBatch]
with Logging {

@@ -131,30 +133,20 @@
}
}

val mXBean = ManagementFactory.getMemoryMXBean

def getNextBatch(): Option[ColumnarBatch] = {
assert(partitionIndex >= 0 && partitionIndex < numParts)

if (memoryProfilingEnabled) {
val memoryMXBean = ManagementFactory.getMemoryMXBean
val heap = memoryMXBean.getHeapMemoryUsage
val nonHeap = memoryMXBean.getNonHeapMemoryUsage

def mb(n: Long) = n / 1024 / 1024

// scalastyle:off println
println(
"JVM_MEMORY: { " +
s"heapUsed: ${mb(heap.getUsed)}, heapCommitted: ${mb(heap.getCommitted)}, " +
s"nonHeapUsed: ${mb(nonHeap.getUsed)}, nonHeapCommitted: ${mb(nonHeap.getCommitted)} " +
"}")
// scalastyle:on println
}

nativeUtil.getNextBatch(
numOutputCols,
(arrayAddrs, schemaAddrs) => {
val ctx = TaskContext.get()
nativeLib.executePlan(ctx.stageId(), partitionIndex, plan, arrayAddrs, schemaAddrs)
telemetryProvider.setGauge("jvmHeap", mXBean.getHeapMemoryUsage.getUsed)
telemetryProvider.withSpan[Option[ColumnarBatch]](
"CometExecIterator.getNextBatch", {
nativeUtil.getNextBatch(
numOutputCols,
(arrayAddrs, schemaAddrs) => {
val ctx = TaskContext.get()
nativeLib.executePlan(ctx.stageId(), partitionIndex, plan, arrayAddrs, schemaAddrs)
})
})
}

6 changes: 6 additions & 0 deletions spark/src/main/scala/org/apache/comet/Native.scala
@@ -175,4 +175,10 @@ class Native extends NativeBase {
arrayAddrs: Array[Long],
schemaAddrs: Array[Long]): Long

@native def traceBegin(name: String): Unit

@native def traceEnd(name: String): Unit

@native def logCounter(name: String, value: Int): Unit

}
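A hedged sketch of how the new native hooks might be paired on the JVM side; the method names come from the declarations above, while the section and counter names are illustrative and not taken from this diff.

// Illustrative pairing of the new trace hooks around a unit of native work.
val nativeLib = new Native()
nativeLib.traceBegin("decode_batch") // hypothetical section name
try {
  // ... call into native code ...
} finally {
  nativeLib.traceEnd("decode_batch")
}
nativeLib.logCounter("batches_decoded", 1) // hypothetical counter name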
84 changes: 84 additions & 0 deletions spark/src/main/scala/org/apache/comet/telemetry/ChromeTelemetryProvider.scala
@@ -0,0 +1,84 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.comet.telemetry

import java.io.{BufferedWriter, FileWriter}

import org.apache.spark.internal.Logging

/**
* Default provider that writes telemetry in Chrome Trace Event Format.
*/
object ChromeTelemetryProvider extends TelemetryProvider with Serializable with Logging {

lazy val pid: Long = {
val processName = java.lang.management.ManagementFactory.getRuntimeMXBean.getName
processName.split("@")(0).toLong
}

lazy val writer: BufferedWriter = {
val w = new BufferedWriter(
new FileWriter(s"comet-events-$pid-${System.currentTimeMillis()}.log"))
w.append('[')
// scalastyle:off runtimeaddshutdownhook
Runtime.getRuntime.addShutdownHook(new Thread(() => {
try {
w.close()
} catch {
case e: Exception =>
logError("Error closing Comet event trace log", e)
}
}))
// scalastyle:on runtimeaddshutdownhook
w
}

override def startSpan(name: String): Span = new ChromeSpan(name)

override def setGauge(name: String, value: Long): Unit = {
val threadId = Thread.currentThread().getId
val ts = System.currentTimeMillis()
ChromeTelemetryProvider.synchronized {
// scalastyle:off
writer.write(
s"""{ "name": "$name", "cat": "PERF", "ph": "C", "pid": $pid, "tid": $threadId, "ts": $ts, "args": { "$name": $value } },""" + "\n")
// scalastyle:on
}
}

private class ChromeSpan(name: String) extends Span {
logEvent(name, "B")

override def end(): Unit = {
logEvent(name, "E")
}

private def logEvent(name: String, ph: String): Unit = {
val threadId = Thread.currentThread().getId
val ts = System.currentTimeMillis()
ChromeTelemetryProvider.synchronized {
// scalastyle:off
writer.write(
s"""{ "name": "$name", "cat": "PERF", "ph": "$ph", "pid": $pid, "tid": $threadId, "ts": $ts },""" + "\n")
// scalastyle:on
}
}
}
}
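A hedged usage sketch: spans are bracketed by startSpan/end, and the resulting comet-events-<pid>-<timestamp>.log file can be loaded into a Chrome-compatible trace viewer (for example chrome://tracing). The span name is illustrative.

// Illustrative span around a unit of work; end() is called in a finally block
// so the closing "E" event is written even if the work throws.
val span = ChromeTelemetryProvider.startSpan("example.operation")
try {
  // ... work to be traced ...
} finally {
  span.end()
}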
31 changes: 31 additions & 0 deletions spark/src/main/scala/org/apache/comet/telemetry/NoopTelemetryProvider.scala
@@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.comet.telemetry

class NoopTelemetryProvider extends TelemetryProvider with Serializable {

override def startSpan(name: String): Span = NoopSpan

override def setGauge(name: String, value: Long): Unit = {}

private object NoopSpan extends Span {
override def end(): Unit = {}
}
}
44 changes: 44 additions & 0 deletions spark/src/main/scala/org/apache/comet/telemetry/OpenTelemetryProvider.scala
@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.comet.telemetry

import io.opentelemetry.sdk.OpenTelemetrySdk
import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk

object OpenTelemetryProvider extends TelemetryProvider with Serializable {

lazy val sdk: OpenTelemetrySdk = {
AutoConfiguredOpenTelemetrySdk.initialize().getOpenTelemetrySdk
}

override def setGauge(name: String, value: Long): Unit = {
sdk.getMeterProvider.meterBuilder("Comet").build().gaugeBuilder(name).build().set(value)
}

override def startSpan(name: String): Span = {
new OpenTelemetrySpan(sdk.tracerBuilder("Comet").build().spanBuilder(name).startSpan())
}

class OpenTelemetrySpan(span: io.opentelemetry.api.trace.Span) extends Span {
override def end(): Unit = {
span.end()
}
}
}
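Because this provider relies on AutoConfiguredOpenTelemetrySdk, exporter wiring is expected to come from the standard OTEL_* environment variables or system properties (for example OTEL_SERVICE_NAME and OTEL_EXPORTER_OTLP_ENDPOINT) rather than from Comet-specific configuration. A minimal sketch, assuming such autoconfiguration is in place; the span and gauge names are illustrative.

// Hedged sketch: emit a span and a gauge through the autoconfigured SDK.
OpenTelemetryProvider.withSpan(
  "example.operation", {
    OpenTelemetryProvider.setGauge("example.gauge", 42L)
  })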
26 changes: 26 additions & 0 deletions spark/src/main/scala/org/apache/comet/telemetry/Span.scala
@@ -0,0 +1,26 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.comet.telemetry

trait Span {

def end(): Unit

}
51 changes: 51 additions & 0 deletions spark/src/main/scala/org/apache/comet/telemetry/TelemetryProvider.scala
@@ -0,0 +1,51 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.comet.telemetry

import org.apache.spark.sql.internal.SQLConf

import org.apache.comet.CometConf

trait TelemetryProvider {

def setGauge(name: String, value: Long): Unit

def startSpan(name: String): Span

def withSpan[T](name: String, fun: => T): T = {
val span = startSpan(name)
try {
fun
} finally {
span.end()
}
}

}

object TelemetryProviderFactory {
def create(conf: SQLConf): TelemetryProvider = {
CometConf.COMET_TELEMETRY_PROVIDER.get(conf) match {
case "chrome" => ChromeTelemetryProvider
case "otel" => OpenTelemetryProvider
case _ => new NoopTelemetryProvider
}
}
}
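A minimal sketch of how a caller might resolve the configured provider from the active SQLConf and time a block with withSpan; the span name and block body are illustrative.

import org.apache.spark.sql.internal.SQLConf

import org.apache.comet.telemetry.TelemetryProviderFactory

// Hedged sketch: pick the provider named by spark.comet.telemetry.provider and wrap a block.
val telemetry = TelemetryProviderFactory.create(SQLConf.get)
val result = telemetry.withSpan(
  "example.block", {
    // ... work to be traced ...
    42
  })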