fix excel load into hive & log optimize
casionone committed Nov 13, 2023
1 parent fd5e7f6 commit 2df288c
Showing 6 changed files with 45 additions and 4 deletions.
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.governance.common.constant;
+
+public class CodeConstants {
+  // will auto append at end of scala code; make sure the last line is not a comment
+  public static String SCALA_CODE_AUTO_APPEND_CODE = "val linkisVar=123";
+}
@@ -19,6 +19,7 @@ package org.apache.linkis.governance.common.paser

import org.apache.linkis.common.utils.{CodeAndRunTypeUtils, Logging, Utils}
import org.apache.linkis.governance.common.conf.GovernanceCommonConf
+import org.apache.linkis.governance.common.constant.CodeConstants
import org.apache.linkis.governance.common.paser.CodeType.CodeType

import org.apache.commons.lang3.StringUtils
@@ -116,7 +117,7 @@ class ScalaCodeParser extends SingleCodeParser with Logging {
if (statementBuffer.nonEmpty) codeBuffer.append(statementBuffer.mkString("\n"))
// Make sure the last line is not a comment
codeBuffer.append("\n")
codeBuffer.append("val linkisVar=123")
codeBuffer.append(CodeConstants.SCALA_CODE_AUTO_APPEND_CODE)
codeBuffer.toArray
}

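For context on the constant introduced above: as the code comment notes, the last statement handed to the Scala interpreter must not be a comment, so ScalaCodeParser appends a harmless trailing statement, now taken from CodeConstants.SCALA_CODE_AUTO_APPEND_CODE instead of a hard-coded literal. A minimal standalone sketch (not Linkis code; names are illustrative) of that append step:

// Mirrors the behaviour shown in the hunk above: keep the parsed statements,
// add a blank separator, then the marker statement, so the interpreter never
// ends on a comment line.
object AppendMarkerSketch {
  val ScalaCodeAutoAppendCode = "val linkisVar=123" // mirrors CodeConstants

  def finish(statements: Seq[String]): Array[String] =
    (statements :+ "\n" :+ ScalaCodeAutoAppendCode).toArray

  def main(args: Array[String]): Unit =
    finish(Seq("val a = 1", "// trailing comment")).foreach(println)
}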
@@ -96,7 +96,7 @@ public void run() {
} else {
if (concurrentExecutor.isIdle())
synchronized (EXECUTOR_STATUS_LOCKER) {
LOG.info("monitor turn to executor status from busy to unlock");
LOG.info("monitor turn to executor status from unlock to busy");
concurrentExecutor.transition(NodeStatus.Busy);
}
}
@@ -71,7 +71,7 @@ class MultiUserEngineReuseLabelChooser extends EngineReuseLabelChooser with Logging
val userAdmin = userMap.get(engineTypeLabel.getEngineType)
val userCreatorLabel = userCreatorLabelOption.get.asInstanceOf[UserCreatorLabel]
logger.info(
s"For multi user engine to reset userCreatorLabel user ${userCreatorLabel.getUser} to Admin $userAdmin "
s"For multi user engine to reset userCreatorLabel user ${userCreatorLabel.getUser} to admin $userAdmin "
)
userCreatorLabel.setUser(userAdmin)
return labels.asJava
@@ -22,6 +22,8 @@ import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext
import org.apache.linkis.engineplugin.spark.common.SparkKind
import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
import org.apache.linkis.engineplugin.spark.extension.SparkPostExecutionHook
+import org.apache.linkis.governance.common.constant.CodeConstants
+import org.apache.linkis.governance.common.constant.job.TaskInfoConstants
import org.apache.linkis.manager.label.entity.engine.CodeLanguageLabel
import org.apache.linkis.protocol.mdq.{DDLCompleteResponse, DDLExecuteResponse}
import org.apache.linkis.rpc.Sender
@@ -49,6 +51,12 @@ class MDQPostExecutionHook extends SparkPostExecutionHook with Logging {
executeResponse: ExecuteResponse,
code: String
): Unit = {
+if (StringUtils.isBlank(code)) {
+return
+}
+if (CodeConstants.SCALA_CODE_AUTO_APPEND_CODE.equalsIgnoreCase(code.trim)) {
+return
+}
val codeLanguageLabel = engineExecutionContext.getLabels
.filter(l => null != l && l.isInstanceOf[CodeLanguageLabel])
.head
@@ -24,13 +24,15 @@ import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._
import org.apache.linkis.engineplugin.spark.exception.MDQErrorException
import org.apache.linkis.engineplugin.spark.extension.SparkPreExecutionHook
+import org.apache.linkis.governance.common.constant.CodeConstants
import org.apache.linkis.manager.label.entity.engine.CodeLanguageLabel
import org.apache.linkis.protocol.mdq.{DDLRequest, DDLResponse}
import org.apache.linkis.rpc.Sender
import org.apache.linkis.storage.utils.StorageUtils

+import org.apache.commons.lang3.StringUtils
+
import org.springframework.stereotype.Component
-import org.springframework.util.StringUtils

import javax.annotation.PostConstruct

@@ -63,6 +65,13 @@ class MDQPreExecutionHook extends SparkPreExecutionHook with Logging {
if (StringUtils.isEmpty(runType) || !SparkKind.FUNCTION_MDQ_TYPE.equalsIgnoreCase(runType)) {
return code
}
+if (StringUtils.isBlank(code)) {
+return code
+}
+if (CodeConstants.SCALA_CODE_AUTO_APPEND_CODE.equalsIgnoreCase(code.trim)) {
+return code
+}
+
val sender = Sender.getSender(SparkConfiguration.MDQ_APPLICATION_NAME.getValue)
val params = new util.HashMap[String, Object]()
params.put("user", StorageUtils.getJvmUser)
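Taken together, the two MDQ hook changes above apply the same guard. A minimal standalone sketch (not Linkis code; the object name is made up) of that skip logic, assuming commons-lang3 is on the classpath:

import org.apache.commons.lang3.StringUtils

// Both hooks return early when the submitted code is blank or consists only of
// the marker statement that ScalaCodeParser auto-appends, so the marker alone is
// not forwarded to the MDQ service as if it were user DDL.
object AutoAppendGuard {
  val ScalaCodeAutoAppendCode = "val linkisVar=123" // mirrors CodeConstants.SCALA_CODE_AUTO_APPEND_CODE

  def shouldSkip(code: String): Boolean =
    StringUtils.isBlank(code) || ScalaCodeAutoAppendCode.equalsIgnoreCase(code.trim)
}

The pre-hook returns the code unchanged and the post-hook simply returns when this condition holds.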
