@@ -17,7 +17,7 @@

package org.apache.linkis.common.conf

-import org.apache.linkis.common.utils.{ParameterUtils, Logging}
+import org.apache.linkis.common.utils.{Logging, ParameterUtils}

import org.apache.commons.lang3.StringUtils

@@ -21,11 +21,18 @@ import org.apache.linkis.common.conf.Configuration
import org.apache.linkis.common.exception.LinkisCommonErrorException
import org.apache.linkis.common.variable
import org.apache.linkis.common.variable._
-import org.apache.linkis.common.variable.DateTypeUtils.{getCurHour, getMonthDay, getToday, getYesterday}
-import org.apache.commons.lang3.{StringUtils, Strings}
+import org.apache.linkis.common.variable.DateTypeUtils.{
+  getCurHour,
+  getMonthDay,
+  getToday,
+  getYesterday
+}
+
+import org.apache.commons.lang3.{Strings, StringUtils}

import java.time.ZonedDateTime
import java.util

import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.util.control.Exception.allCatch
@@ -115,7 +122,9 @@ object VariableUtils extends Logging {
        case _ =>
          if (!nameAndType.contains(key) && StringUtils.isNotEmpty(value)) {
            // if ((allCatch opt value.toDouble).isDefined) {
-           if ((allCatch opt BigDecimal(value)).isDefined && !Strings.CS.startsWith(value, "0")) {
+           if (
+             (allCatch opt BigDecimal(value)).isDefined && !Strings.CS.startsWith(value, "0")
+           ) {
              nameAndType(key) = variable.BigDecimalValue(BigDecimal(value))
            } else {
              nameAndType(key) = variable.StringType(value)
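
Note on the guard above: a substitution value is typed as `BigDecimalValue` only when it parses as a `BigDecimal` and does not start with `0`, presumably so zero-padded literals such as `007` keep their string form; `Strings.CS` is the case-sensitive matcher from recent commons-lang3 releases. A minimal standalone sketch of the same rule (the `classify` helper is hypothetical):

```scala
import org.apache.commons.lang3.Strings

import scala.util.control.Exception.allCatch

// Sketch of the detection rule: numeric-looking values become BigDecimal
// variables, everything else stays a plain string.
def classify(value: String): String =
  if ((allCatch opt BigDecimal(value)).isDefined && !Strings.CS.startsWith(value, "0")) {
    s"BigDecimalValue(${BigDecimal(value)})"
  } else {
    s"StringType($value)"
  }

// classify("12.5") -> "BigDecimalValue(12.5)"
// classify("007")  -> "StringType(007)"  // leading zero: kept as a string
// classify("v1")   -> "StringType(v1)"   // not numeric
```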
@@ -111,6 +111,7 @@ case class YearType(value: CustomYearType) extends VariableType {
}

case class BigDecimalValue(value: BigDecimal) extends VariableType {
+
  override def getValue: String = {
    val result = bigDecimalOrLong(value)
    result match {
@@ -126,7 +127,10 @@ case class BigDecimalValue(value: BigDecimal) extends VariableType {
case "*" => val res = value * BigDecimal(bValue); formatResult(res)
case "/" => val res = value / BigDecimal(bValue); formatResult(res)
case _ =>
throw new LinkisCommonErrorException(20050, s"BigDecimal class is not supported to use:$signal")
throw new LinkisCommonErrorException(
20050,
s"BigDecimal class is not supported to use:$signal"
)
}
}

@@ -146,6 +150,7 @@ case class BigDecimalValue(value: BigDecimal) extends VariableType {
      bd
    }
  }
+
}

case class LongType(value: Long) extends VariableType {
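
A hedged sketch of the operator dispatch in `BigDecimalValue` shown above; the real method throws `LinkisCommonErrorException(20050, ...)` and routes results through `formatResult`, while this standalone stand-in uses `IllegalArgumentException` to stay self-contained:

```scala
// Signal-based arithmetic dispatch: four supported operators, anything else
// is rejected with the same message the real code uses.
def calculate(value: BigDecimal, signal: String, bValue: String): BigDecimal =
  signal match {
    case "+" => value + BigDecimal(bValue)
    case "-" => value - BigDecimal(bValue)
    case "*" => value * BigDecimal(bValue)
    case "/" => value / BigDecimal(bValue)
    case _ =>
      throw new IllegalArgumentException(s"BigDecimal class is not supported to use:$signal")
  }

// calculate(BigDecimal("2.5"), "*", "4") == BigDecimal(10.0)
```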
@@ -50,8 +50,8 @@ object StorageConfiguration {
  val STORAGE_BUILD_FS_CLASSES = CommonVars(
    "wds.linkis.storage.build.fs.classes",
    "org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem," +
-     "org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem," +
-     "org.apache.linkis.storage.factory.impl.BuildAzureBlobFileSystem"
+       "org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem," +
+       "org.apache.linkis.storage.factory.impl.BuildAzureBlobFileSystem"
  )

  val IS_SHARE_NODE = CommonVars("wds.linkis.storage.is.share.node", true)
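
Only the continuation indentation of the default value changes here; the property stays a comma-separated list of file-system builder classes. A hedged sketch of how a consumer would read it, assuming the `CommonVars.getValue` accessor pattern used throughout Linkis:

```scala
// Reading the configured builder list; error handling omitted.
val builderClassNames: Array[String] =
  StorageConfiguration.STORAGE_BUILD_FS_CLASSES.getValue.split(",")
// -> Array("org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem", ...)
```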
@@ -204,7 +204,9 @@ object StorageUtils extends Logging {
   * @return
   */
  def getFsPath(path: String): FsPath = {
-   if (path.startsWith(FILE_SCHEMA) || path.startsWith(HDFS_SCHEMA) || path.startsWith(BLOB_SCHEMA)) new FsPath(path)
+   if (
+     path.startsWith(FILE_SCHEMA) || path.startsWith(HDFS_SCHEMA) || path.startsWith(BLOB_SCHEMA)
+   ) new FsPath(path)
    else {
      new FsPath(FILE_SCHEMA + path)
    }
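
Usage sketch for the extended check, assuming `FILE_SCHEMA = "file://"` and `HDFS_SCHEMA = "hdfs://"` (`BLOB_SCHEMA` is the newly recognized Azure Blob scheme):

```scala
// Paths that already carry a known scheme pass through untouched; bare paths
// are treated as local files.
val kept = StorageUtils.getFsPath("hdfs:///tmp/out") // stays hdfs:///tmp/out
val local = StorageUtils.getFsPath("/tmp/out") // becomes file:///tmp/out
```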
@@ -17,7 +17,7 @@

package org.apache.linkis.governance.common.utils

-import org.apache.linkis.common.utils.{ParameterUtils, Logging}
+import org.apache.linkis.common.utils.{Logging, ParameterUtils}

import org.apache.commons.lang3.StringUtils

@@ -19,8 +19,6 @@ package org.apache.linkis.manager.engineplugin.common.conf

import org.apache.linkis.common.conf.{ByteType, CommonVars, Configuration}

-import org.apache.commons.lang3.{JavaVersion, SystemUtils}
-
object EnvConfiguration {

val HIVE_CONF_DIR = CommonVars[String](
@@ -18,6 +18,7 @@
package org.apache.linkis.entrance.interceptor.impl;

import org.apache.linkis.governance.common.entity.job.JobRequest;
+
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

@@ -55,24 +56,21 @@ void isSelectOverLimit() {
}

  /**
-   * Before the fix, the SQL-splicing code produced the following output:
-   * select
-   * id,
-   * name,
-   * array_join(array_intersect(map_keys(info),array['abs','oda'],' limit 5000;
-   * ') as infos
-   * from ods.dim_ep22
+   * Before the fix, the SQL-splicing code produced the following output: select id, name,
+   * array_join(array_intersect(map_keys(info),array['abs','oda'],' limit 5000; ') as infos from
+   * ods.dim_ep22
   */
  @Test
  void splicingLimitSql() {
-    String code = "select\n" +
-        "id,\n" +
-        "name,\n" +
-        "array_join(array_intersect(map_keys(info),array['abs','oda'],';') as infos\n" +
-        "from ods.dim_ep22";
+    String code =
+        "select\n"
+            + "id,\n"
+            + "name,\n"
+            + "array_join(array_intersect(map_keys(info),array['abs','oda'],';') as infos\n"
+            + "from ods.dim_ep22";
    StringBuilder logAppender = new StringBuilder();
    JobRequest jobRequest = new JobRequest();
    SQLExplain.dealSQLLimit(code, jobRequest, logAppender);
-    Assertions.assertEquals(code+" limit 5000", jobRequest.getExecutionCode());
+    Assertions.assertEquals(code + " limit 5000", jobRequest.getExecutionCode());
  }
}
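
The translated comment above documents the original bug: the `limit` clause was spliced into the middle of the quoted `';'` argument of `array_join`, leaving an unterminated string literal. The behavior this test pins down appends the clause after the complete statement instead; a minimal sketch of that expectation (`dealSQLLimit` itself does more, such as deciding whether a SELECT needs a limit at all):

```scala
// Hedged sketch of the fixed splicing: append, never inject.
def spliceLimit(sql: String, limit: Int = 5000): String = s"$sql limit $limit"

// spliceLimit("select ... array_join(..., ';') as infos\nfrom ods.dim_ep22")
//   -> "select ... array_join(..., ';') as infos\nfrom ods.dim_ep22 limit 5000"
```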
@@ -135,7 +135,7 @@ class HiveEngineConcurrentConnExecutor(
      code: String
  ): ExecuteResponse = {
    LOG.info(s"HiveEngineConcurrentConnExecutor Ready to executeLine: $code")
-   val taskId: String = engineExecutorContext.getJobId.get
+   val taskId: String = engineExecutorContext.getJobId.getOrElse("udf_init")
    CSHiveHelper.setContextIDInfoToHiveConf(engineExecutorContext, hiveConf)

    val realCode = code.trim()
@@ -166,12 +166,7 @@

        val driver = new HiveDriverProxy(any)
        driverCache.put(taskId, driver)
-       executeHQL(
-         engineExecutorContext.getJobId.get,
-         engineExecutorContext,
-         realCode,
-         driver
-       )
+       executeHQL(taskId, engineExecutorContext, realCode, driver)
      case _ =>
        val resp = proc.run(realCode.substring(tokens(0).length).trim)
        val result = new String(baos.toByteArray)
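
Note on the `taskId` change above: in concurrent mode `getJobId` can be empty, for example for internally issued statements such as UDF initialization, which the `"udf_init"` fallback name suggests, so calling `.get` on the `Option` would throw. A minimal sketch, assuming `getJobId: Option[String]`:

```scala
// getOrElse supplies a stable cache key when no job id is present, instead of
// letting Option.get throw NoSuchElementException.
def taskIdFor(jobId: Option[String]): String = jobId.getOrElse("udf_init")

// taskIdFor(Some("task-42")) -> "task-42"
// taskIdFor(None)            -> "udf_init"
```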
@@ -24,7 +24,10 @@ import org.apache.linkis.engineconn.common.engineconn.EngineConn
import org.apache.linkis.engineconn.common.hook.EngineConnHook
import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext
import org.apache.linkis.engineconn.core.executor.ExecutorManager
-import org.apache.linkis.engineplugin.hive.executor.HiveEngineConnExecutor
+import org.apache.linkis.engineplugin.hive.executor.{
+  HiveEngineConcurrentConnExecutor,
+  HiveEngineConnExecutor
+}
import org.apache.linkis.manager.engineplugin.common.launch.process.Environment
import org.apache.linkis.manager.label.entity.Label
import org.apache.linkis.manager.label.entity.engine.{CodeLanguageLabel, RunType}
@@ -78,7 +81,12 @@ class HiveAddJarsEngineHook extends EngineConnHook with Logging {
        ExecutorManager.getInstance.getExecutorByLabels(labels) match {
          case executor: HiveEngineConnExecutor =>
            executor.executeLine(new EngineExecutionContext(executor), sql)
+           logger.info("use hive none concurrent mode.")
+         case executor: HiveEngineConcurrentConnExecutor =>
+           executor.executeLine(new EngineExecutionContext(executor), sql)
+           logger.info("use hive concurrent mode.")
+         case _ =>
+           logger.warn(s"Executor is not a ComputationExecutor, skip adding jar: $jar")
        }
      } catch {
        case t: Throwable => logger.error(s"run hive sql ${addSql + jar} failed", t)
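
The hook now routes the add-jar statement to whichever executor type the label lookup returns and logs the mode; previously any executor other than `HiveEngineConnExecutor` would have fallen through the match and raised a `MatchError`, which the surrounding `catch` then logged as a failed SQL run. A simplified sketch of the dispatch shape, with hypothetical stand-in types:

```scala
// Stand-ins for the two Hive executor classes; the real code calls
// executeLine(new EngineExecutionContext(executor), sql) on each.
trait Executor
class SerialExec extends Executor
class ConcurrentExec extends Executor

def runAddJar(executor: Executor, sql: String): Unit = executor match {
  case _: SerialExec => println(s"use hive none concurrent mode: $sql")
  case _: ConcurrentExec => println(s"use hive concurrent mode: $sql")
  case _ => println(s"executor not handled, skip: $sql") // no MatchError anymore
}
```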