From 465a6b11e63be935450b027d4ad3fe992cc951de Mon Sep 17 00:00:00 2001
From: casionone
Date: Wed, 13 Dec 2023 21:37:54 +0800
Subject: [PATCH] code optimize after code review

---
 .../src/main/scala/org/apache/linkis/common/utils/Utils.scala | 4 +++-
 .../engineplugin/spark/executor/SparkEngineConnExecutor.scala | 2 +-
 .../engineplugin/spark/executor/SparkPythonExecutor.scala     | 2 +-
 .../engineplugin/spark/executor/SparkScalaExecutor.scala      | 2 +-
 4 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala
index 80e3ff7e5e..deac2f2464 100644
--- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala
+++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala
@@ -43,6 +43,8 @@ import org.slf4j.Logger
 
 object Utils extends Logging {
 
+  val DEFAULT_SCHEDULER_THREAD_NAME_PREFIX = "Linkis-Default-Scheduler-Thread-"
+
   def tryQuietly[T](tryOp: => T): T = tryQuietly(tryOp, _ => ())
 
   def tryCatch[T](tryOp: => T)(catchOp: Throwable => T): T = {
@@ -199,7 +201,7 @@ object Utils extends Logging {
 
   val defaultScheduler: ScheduledThreadPoolExecutor = {
     val scheduler =
-      new ScheduledThreadPoolExecutor(20, threadFactory("Linkis-Default-Scheduler-Thread-", true))
+      new ScheduledThreadPoolExecutor(20, threadFactory(DEFAULT_SCHEDULER_THREAD_NAME_PREFIX, true))
     scheduler.setMaximumPoolSize(20)
     scheduler.setKeepAliveTime(5, TimeUnit.MINUTES)
     scheduler
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala
index 95c18b3ca9..d391faf4c5 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala
@@ -316,7 +316,7 @@ abstract class SparkEngineConnExecutor(val sc: SparkContext, id: Long)
       if (closeThreadEnable) {
         val threadName = thread.getName
-        if (threadName.contains("Linkis-Default-Scheduler-Thread-")) {
+        if (threadName.contains(Utils.DEFAULT_SCHEDULER_THREAD_NAME_PREFIX)) {
           logger.info(s"try to force stop thread:${threadName}")
           // force to stop scala thread
           Utils.tryAndWarn(thread.stop())
         }
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
index 083a82e9f6..aef5d2dfdd 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
@@ -102,7 +102,7 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
   override def init(): Unit = {
     setCodeParser(new PythonCodeParser)
     super.init()
-    logger.info("spark sql executor start")
+    logger.info("spark python executor start")
   }
 
   override def killTask(taskID: String): Unit = {
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala
index 6bba4cb371..44bd74f074 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala
@@ -105,7 +105,7 @@ class SparkScalaExecutor(sparkEngineSession: SparkEngineSession, id: Long)
     System.setProperty("scala.repl.name.line", ("$line" + this.hashCode).replace('-', '0'))
     setCodeParser(new ScalaCodeParser)
     super.init()
-    logger.info("spark sql executor start")
+    logger.info("spark scala executor start")
   }
 
   def lazyInitLoadILoop(): Unit = {
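
Note: the substance of this change is replacing a duplicated thread-name string literal with one shared constant, so the factory that names the scheduler threads and the executor code that later matches threads by name cannot drift apart. Below is a minimal, self-contained Scala sketch of that pattern, assuming only the JDK; the threadFactory helper here is a stand-in for Linkis's own helper, not its actual implementation.

import java.util.concurrent.{ScheduledThreadPoolExecutor, ThreadFactory, TimeUnit}
import java.util.concurrent.atomic.AtomicInteger

object SchedulerThreadNamingSketch {

  // Single shared definition of the prefix, mirroring
  // Utils.DEFAULT_SCHEDULER_THREAD_NAME_PREFIX in this patch.
  val DEFAULT_SCHEDULER_THREAD_NAME_PREFIX = "Linkis-Default-Scheduler-Thread-"

  // Stand-in for Linkis's threadFactory helper: names each new thread with
  // the shared prefix plus a counter, and optionally marks it as a daemon.
  private def threadFactory(prefix: String, daemon: Boolean): ThreadFactory =
    new ThreadFactory {
      private val count = new AtomicInteger(0)
      override def newThread(r: Runnable): Thread = {
        val t = new Thread(r, prefix + count.incrementAndGet())
        t.setDaemon(daemon)
        t
      }
    }

  // Producer side: a scheduler whose worker threads all carry the prefix,
  // configured like the defaultScheduler in Utils.scala.
  val defaultScheduler: ScheduledThreadPoolExecutor = {
    val scheduler = new ScheduledThreadPoolExecutor(
      20,
      threadFactory(DEFAULT_SCHEDULER_THREAD_NAME_PREFIX, daemon = true)
    )
    scheduler.setMaximumPoolSize(20)
    scheduler.setKeepAliveTime(5, TimeUnit.MINUTES)
    scheduler
  }

  // Consumer side: identify scheduler threads by the shared constant instead
  // of a second copy of the literal, as SparkEngineConnExecutor now does.
  def isDefaultSchedulerThread(thread: Thread): Boolean =
    thread.getName.contains(DEFAULT_SCHEDULER_THREAD_NAME_PREFIX)
}

With this arrangement, both the pool construction and the name check depend on the one constant, which is exactly what referencing Utils.DEFAULT_SCHEDULER_THREAD_NAME_PREFIX from SparkEngineConnExecutor accomplishes.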