Skip to content
This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit 728ba0a

Browse files
duyanghao authored and mccheah committed
Set ENV_DRIVER_MEMORY to memory instead of memory+overhead (#475)
* Set ENV_DRIVER_MEMORY to memory instead of memory+overhead. Signed-off-by: duyanghao <[email protected]>
* Restore test
1 parent d710563 commit 728ba0a

File tree

2 files changed

+8
-5
lines changed

2 files changed

+8
-5
lines changed

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/BaseDriverConfigurationStep.scala

Lines changed: 4 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -48,6 +48,9 @@ private[spark] class BaseDriverConfigurationStep(
4848
// Memory settings
4949
private val driverMemoryMiB = submissionSparkConf.get(
5050
org.apache.spark.internal.config.DRIVER_MEMORY)
51+
private val driverMemoryString = submissionSparkConf.get(
52+
org.apache.spark.internal.config.DRIVER_MEMORY.key,
53+
org.apache.spark.internal.config.DRIVER_MEMORY.defaultValueString)
5154
private val memoryOverheadMiB = submissionSparkConf
5255
.get(KUBERNETES_DRIVER_MEMORY_OVERHEAD)
5356
.getOrElse(math.max((MEMORY_OVERHEAD_FACTOR * driverMemoryMiB).toInt,
@@ -102,7 +105,7 @@ private[spark] class BaseDriverConfigurationStep(
102105
.addToEnv(driverExtraClasspathEnv.toSeq: _*)
103106
.addNewEnv()
104107
.withName(ENV_DRIVER_MEMORY)
105-
.withValue(driverContainerMemoryWithOverheadMiB + "M") // JVM treats the "M" unit as "Mi"
108+
.withValue(driverMemoryString)
106109
.endEnv()
107110
.addNewEnv()
108111
.withName(ENV_DRIVER_MAIN_CLASS)

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/BaseDriverConfigurationStepSuite.scala

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -42,10 +42,10 @@ private[spark] class BaseDriverConfigurationStepSuite extends SparkFunSuite {
4242
test("Set all possible configurations from the user.") {
4343
val sparkConf = new SparkConf()
4444
.set(KUBERNETES_DRIVER_POD_NAME, "spark-driver-pod")
45-
.set(org.apache.spark.internal.config.DRIVER_CLASS_PATH, "/opt/spark/spark-exmaples.jar")
45+
.set(org.apache.spark.internal.config.DRIVER_CLASS_PATH, "/opt/spark/spark-examples.jar")
4646
.set("spark.driver.cores", "2")
4747
.set(KUBERNETES_DRIVER_LIMIT_CORES, "4")
48-
.set(org.apache.spark.internal.config.DRIVER_MEMORY, 256L)
48+
.set(org.apache.spark.internal.config.DRIVER_MEMORY.key, "256M")
4949
.set(KUBERNETES_DRIVER_MEMORY_OVERHEAD, 200L)
5050
.set(DRIVER_DOCKER_IMAGE, "spark-driver:latest")
5151
.set(s"spark.kubernetes.driver.annotation.$CUSTOM_ANNOTATION_KEY", CUSTOM_ANNOTATION_VALUE)
@@ -80,8 +80,8 @@ private[spark] class BaseDriverConfigurationStepSuite extends SparkFunSuite {
8080
.map(env => (env.getName, env.getValue))
8181
.toMap
8282
assert(envs.size === 6)
83-
assert(envs(ENV_SUBMIT_EXTRA_CLASSPATH) === "/opt/spark/spark-exmaples.jar")
84-
assert(envs(ENV_DRIVER_MEMORY) === "456M")
83+
assert(envs(ENV_SUBMIT_EXTRA_CLASSPATH) === "/opt/spark/spark-examples.jar")
84+
assert(envs(ENV_DRIVER_MEMORY) === "256M")
8585
assert(envs(ENV_DRIVER_MAIN_CLASS) === MAIN_CLASS)
8686
assert(envs(ENV_DRIVER_ARGS) === "arg1 arg2")
8787
assert(envs(DRIVER_CUSTOM_ENV_KEY1) === "customDriverEnv1")

0 commit comments

Comments (0)