Skip to content

Commit

Permalink
Optimize code after code review
Browse files Browse the repository at this point in the history
  • Loading branch information
casionone committed Dec 13, 2023
1 parent f12436c commit 465a6b1
Show file tree
Hide file tree
Showing 4 changed files with 6 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,8 @@ import org.slf4j.Logger

object Utils extends Logging {

val DEFAULE_SCHEDULER_THREAD_NAME_PREFIX = "Linkis-Default-Scheduler-Thread-"

def tryQuietly[T](tryOp: => T): T = tryQuietly(tryOp, _ => ())

def tryCatch[T](tryOp: => T)(catchOp: Throwable => T): T = {
Expand Down Expand Up @@ -199,7 +201,7 @@ object Utils extends Logging {

val defaultScheduler: ScheduledThreadPoolExecutor = {
val scheduler =
new ScheduledThreadPoolExecutor(20, threadFactory("Linkis-Default-Scheduler-Thread-", true))
new ScheduledThreadPoolExecutor(20, threadFactory(DEFAULE_SCHEDULER_THREAD_NAME_PREFIX, true))
scheduler.setMaximumPoolSize(20)
scheduler.setKeepAliveTime(5, TimeUnit.MINUTES)
scheduler
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -316,7 +316,7 @@ abstract class SparkEngineConnExecutor(val sc: SparkContext, id: Long)

if (closeThreadEnable) {
val threadName = thread.getName
if (threadName.contains("Linkis-Default-Scheduler-Thread-")) {
if (threadName.contains(Utils.DEFAULE_SCHEDULER_THREAD_NAME_PREFIX)) {
logger.info(s"try to force stop thread:${threadName}")
// force to stop scala thread
Utils.tryAndWarn(thread.stop())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
override def init(): Unit = {
setCodeParser(new PythonCodeParser)
super.init()
logger.info("spark sql executor start")
logger.info("spark python executor start")
}

override def killTask(taskID: String): Unit = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ class SparkScalaExecutor(sparkEngineSession: SparkEngineSession, id: Long)
System.setProperty("scala.repl.name.line", ("$line" + this.hashCode).replace('-', '0'))
setCodeParser(new ScalaCodeParser)
super.init()
logger.info("spark sql executor start")
logger.info("spark scala executor start")
}

def lazyInitLoadILoop(): Unit = {
Expand Down

0 comments on commit 465a6b1

Please sign in to comment.