diff --git a/.github/workflows/check-license.yml b/.github/workflows/check-license.yml index 3c79607dc32..2a6cf67f232 100644 --- a/.github/workflows/check-license.yml +++ b/.github/workflows/check-license.yml @@ -36,7 +36,7 @@ jobs: echo "rat_file=$rat_file" if [[ -n "$rat_file" ]];then echo "check error!" && cat $rat_file && exit 123;else echo "check success!" ;fi - name: Upload the report - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: license-check-report path: "**/target/rat.txt" diff --git a/.github/workflows/check-third-party-dependencies.yml b/.github/workflows/check-third-party-dependencies.yml deleted file mode 100644 index bcf4a371c3c..00000000000 --- a/.github/workflows/check-third-party-dependencies.yml +++ /dev/null @@ -1,56 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -name: Third-party Dependencies Check - -on: [push, pull_request] - -env: - MAVEN_OPTS: -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 - -jobs: - third-party-dependencies-check-: - runs-on: ubuntu-latest - steps: - - name: Checkout source - uses: actions/checkout@v2 - - name: Set up JDK 8 - uses: actions/setup-java@v2 - with: - java-version: '8' - distribution: 'adopt' - - name: mvn install - run: - #pom.xml also introduce linkis related jar,so run mvn install in first time - ./mvnw install -Dmaven.test.skip=true -Dmaven.javadoc.skip=true - - name: mvn dependency:copy-dependencies - run: - ./mvnw dependency:copy-dependencies -DincludeScope=runtime -DoutputDirectory=${{ github.workspace }}/current_dependencies - - name: generate current_dependencies.txt - run: | - ls ${{ github.workspace }}/current_dependencies |egrep -v "^linkis" |sort > ~/current_dependencies.txt - cat ~/current_dependencies.txt - - name: check third dependencies - run: | - #by using commond join ,to check whether there are new third-party dependencies,compared with file(tool/dependencies/known-dependencies.txt) - sort ${{ github.workspace }}/tool/dependencies/known-dependencies.txt > ~/known-dependencies.txt - join -t : -o 1.1 2.1 -a2 ~/known-dependencies.txt ~/current_dependencies.txt > ~/result.txt - #print new third-party dependencies name if it exists - awk -F ":" '{if($1=="")print $2" is not in file known-dependencies.txt!\n You can refer to this guide to repair it(你可以参考这个执行进行修复):https://linkis.apache.org/zh-CN/docs/latest/development/development-specification/license"}' ~/result.txt - result=`awk -F ":" '{if($1=="")print $2}' ~/result.txt |wc -l` - #if has new third-party,the Action will fail - if [[ $result == 0 ]];then echo "All third dependencies is known!" 
;else exit 1;fi \ No newline at end of file diff --git a/codecheck.ignore b/codecheck.ignore new file mode 100644 index 00000000000..cc679f1529c --- /dev/null +++ b/codecheck.ignore @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/DESUtil.java +linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/cs/common/serialize/helper/ContextSerializationHelper.java +linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/cs/listener/callback/imp/DefaultContextIDCallbackEngine.java +linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/cs/listener/callback/imp/DefaultContextKeyCallbackEngine.java +linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/utils/EngineTypeLabelCreator.java +linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/cs/listener/ListenerBus/ContextAsyncListenerBus.java +linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/LabelManagerMapper.xml +linkis-engineconn-plugins/hbase/hbase-core/src/main/java/org/apache/linkis/manager/engineplugin/hbase/HBaseConnectionManager.java +linkis-public-enhancements/linkis-instance-label-server/src/main/resources/mapper/common/InsLabelRelationMapper.xml +linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/AESUtils.java \ No newline at end of file diff --git a/docs/ai-prompt/base-rule.md b/docs/ai-prompt/base-rule.md new file mode 100644 index 00000000000..a32b7105b57 --- /dev/null +++ b/docs/ai-prompt/base-rule.md @@ -0,0 +1,378 @@ +# Apache Linkis AI IDE 开发规约 + +## 角色定位 +你是Apache Linkis项目的资深后端开发专家,熟练掌握: +- **核心技术栈**:Spring Boot 2.7 + Spring Cloud 2021.0.8 + MyBatis-Plus 3.5.7 +- **编程语言**:Java 8 + Scala 2.12(混合开发模式) +- **数据库**:MySQL 8.0 + Hive(通过JDBC) +- **微服务架构**:Eureka服务发现 + Gateway网关 + Feign远程调用 +- **大数据引擎**:Spark、Hive、Flink、Python、Shell等多引擎支持 + +--- + +# 项目核心信息 + +## 基础配置 +- **项目根目录**:linkis +- **基础包名**:org.apache.linkis +- **版本信息**:Apache Linkis 1.x +- **构建工具**:Maven 3.5+ +- **JDK版本**:1.8 +- **字符编码**:统一使用StandardCharsets.UTF_8 + +## 关键组件 +- 
**统一返回体**:`org.apache.linkis.server.Message` +- **统一异常**:`org.apache.linkis.common.exception.LinkisException` +- **配置管理**:`org.apache.linkis.common.conf.CommonVars` +- **数据库脚本**: + - DDL:`linkis-dist/package/db/linkis_ddl.sql` + - DML:`linkis-dist/package/db/linkis_dml.sql` + +--- + +# 系统架构设计 + +## 三层架构模式 +Linkis采用微服务架构,按功能职责划分为三大服务类别: + +### 1. 微服务治理服务(基础设施层) +负责微服务的基础设施支撑,包括服务发现、网关路由、配置管理等。 +- Spring Cloud Gateway:API网关服务 +- Eureka:服务注册与发现中心 +- Open Feign:声明式HTTP客户端 + +### 2. 计算治理服务(核心业务层) +负责计算任务的生命周期管理,从任务提交到执行完成的全流程控制。 +- Entrance:任务提交入口服务 +- JobHistory:任务历史记录服务 +- LinkisManager:资源管理服务 +- EngineConnManager:引擎连接管理服务 +- EngineConn:引擎连接器 + +### 3. 公共增强服务(支撑服务层) +提供跨服务的公共能力,如文件管理、数据源管理、配置管理等。 +- PublicService:公共服务 +- BML:大数据物料库 +- DataSource:数据源管理 +- Configuration:配置管理 +- ContextServer:上下文服务 +- Monitor:监控服务 + +## 服务交互模式 +``` +上层应用 -> Gateway -> Entrance -> Manager -> ECM -> EngineConn -> 底层引擎 + ↓ + 公共增强服务(BML、DataSource、Configuration等) +``` + +## 各服务模块说明 +### 微服务治理服务 +Spring Cloud Gateway +功能:API网关服务,负责请求路由转发、负载均衡、安全认证等 +主类入口:org.apache.linkis.gateway.springcloud.LinkisGatewayApplication +模块路径:linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway + +Eureka +功能:服务注册与发现中心,管理微服务实例的注册、发现和健康检查 +主类入口:org.apache.linkis.eureka.SpringCloudEurekaApplication +模块路径:linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka + +Open Feign +功能:声明式HTTP客户端,简化微服务间的远程调用 +主类入口:集成在各个微服务模块中,无独立启动类 +模块路径:集成在linkis-commons/linkis-rpc等公共模块中 + +### 计算治理服务 +Entrance +功能:任务提交入口服务,负责任务调度、状态管控、任务信息推送等核心功能 +主类入口:org.apache.linkis.entrance.LinkisEntranceApplication +模块路径:linkis-computation-governance/linkis-entrance + +JobHistory +功能:任务历史记录服务,提供任务执行历史的查询、统计和管理功能 +主类入口:org.apache.linkis.jobhistory.LinkisJobHistoryApp +模块路径:linkis-public-enhancements/linkis-jobhistory + +LinkisManager +功能:计算治理层的管理服务,包含AppManager、ResourceManager、LabelManager等管理控制服务 +主类入口:org.apache.linkis.manager.LinkisManagerApplication 
+模块路径:linkis-computation-governance/linkis-manager/linkis-application-manager + +EngineConnManager +功能:引擎连接器管理服务,负责控制EngineConn的生命周期(启动、停止) +主类入口:org.apache.linkis.ecm.server.LinkisECMApplication +模块路径:linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server + +EngineConn +功能:引擎连接器,负责接收任务并提交到Spark、Hive、Flink等底层引擎执行 +主类入口:org.apache.linkis.engineconn.LinkisEngineConnApplication +模块路径:linkis-computation-governance/linkis-engineconn + +### 公共增强服务 +PublicService +功能:公共服务模块,提供统一配置管理、微服务管理等基础服务能力 +主类入口:org.apache.linkis.filesystem.LinkisPublicServiceApp +模块路径:linkis-public-enhancements/linkis-pes-publicservice + +BML +功能:大数据物料库服务(BigData Material Library),提供文件上传、下载、版本管理等功能 +主类入口:org.apache.linkis.bml.LinkisBMLApplication +模块路径:linkis-public-enhancements/linkis-bml-server + +DataSource +功能:数据源管理服务,提供统一的数据源连接、管理和元数据服务 +主类入口:org.apache.linkis.metadata.LinkisDataSourceApplication(数据源服务) +模块路径:linkis-public-enhancements/linkis-datasource + +Configuration +功能:配置管理服务,提供系统级、用户级、引擎级等多层次的配置管理 +主类入口:org.apache.linkis.configuration.LinkisConfigurationApp +模块路径:linkis-public-enhancements/linkis-configuration + +ContextServer +功能:上下文服务,支持跨引擎的资源共享、变量传递和会话管理 +主类入口:org.apache.linkis.cs.server.LinkisCSApplication +模块路径:linkis-public-enhancements/linkis-cs-server + +Monitor +功能:监控服务,提供系统性能监控、告警和运维管理功能,包括任务监控、资源监控、用户模式监控等 +主类入口:org.apache.linkis.monitor.LinksMonitorApplication +模块路径:linkis-extensions/linkis-et-monitor + +--- + +# 开发规范与约束 + +## 代码边界约束 + +### 🚫 禁止操作 +- **数据库结构**:除非明确指定,严禁修改现有表结构 +- **第三方依赖**:不允许引入新的第三方依赖库 +- **核心接口**:不得修改现有公共接口的签名 + +### ✅ 允许操作 +- **新增功能**:在不破坏现有逻辑的前提下扩展功能 +- **新增配置**:在现有配置文件中新增配置项 +- **新增表字段**:在现有表基础上新增字段 + +## 技术规范 + +### 编程语言使用 +- **Java**:主要用于REST API、Service层、Entity类、配置类 +- **Scala**:主要用于计算逻辑、RPC通信、复杂业务处理 + +### 日志规范 +```java +// 必须使用统一的Logger +private static final Logger logger = LoggerFactory.getLogger(ClassName.class); + +// 日志级别使用: +// ERROR: 系统错误、业务异常 +// WARN: 警告信息、降级处理 +// INFO: 关键业务节点、状态变更 +// DEBUG: 详细调试信息 + 
+logger.info("User {} starts processing task {}", username, taskId); +logger.error("Failed to process task {} for user {}", taskId, username, e); +``` + +### 配置管理规范 +- 所有配置统一使用`org.apache.linkis.common.conf.CommonVars` +- 参考示例:`org.apache.linkis.jobhistory.conf.JobhistoryConfiguration` +- 所有新需求必须添加配置开关,默认设置false +- 配置存放位置:当前模块的conf目录,一般为xxxConfiguration类 + +### 字符编码规范 +```java +// 统一使用StandardCharsets.UTF_8,禁止使用字符串"UTF-8" +import java.nio.charset.StandardCharsets; + +String content = new String(bytes, StandardCharsets.UTF_8); +Files.write(path, content.getBytes(StandardCharsets.UTF_8)); +``` + +### API设计规范 +```java +@Api(tags = "module operation") +@RestController +@RequestMapping(path = "/api/rest_j/v1/module") +public class ModuleRestfulApi { + + @ApiOperation(value = "operation", notes = "description", response = Message.class) + @RequestMapping(path = "/operation", method = RequestMethod.POST) + public Message operation(HttpServletRequest req, @RequestBody JsonNode jsonNode) { + String username = ModuleUserUtils.getOperationUser(req, "operation"); + // 业务逻辑处理 + return Message.ok("success").data("result", data); + } +} +``` + +### 异常处理规范 +```java +// 统一使用LinkisException及其子类 +try { + // 业务逻辑 +} catch (Exception e) { + logger.error("Operation failed", e); + throw new YourModuleException("Error message", e); +} +``` + +--- + +# 开发模板与示例 + +## 新功能开发模板 + +### 1. 
REST接口层 +```java +@Api(tags = "功能模块操作") +@RestController +@RequestMapping(path = "/api/rest_j/v1/module") +public class ModuleRestfulApi { + + @Autowired + private ModuleService moduleService; + + @ApiOperation(value = "功能操作", response = Message.class) + @RequestMapping(path = "/action", method = RequestMethod.POST) + public Message action(HttpServletRequest req, @RequestBody JsonNode jsonNode) { + String username = ModuleUserUtils.getOperationUser(req, "action"); + + // 参数解析和验证 + String param = jsonNode.get("param").asText(); + if (StringUtils.isBlank(param)) { + return Message.error("参数不能为空"); + } + + try { + Object result = moduleService.performAction(param, username); + return Message.ok("操作成功").data("result", result); + } catch (Exception e) { + logger.error("操作失败", e); + return Message.error("操作失败:" + e.getMessage()); + } + } +} +``` + +### 2. 服务层 +```java +@Service +public class ModuleServiceImpl implements ModuleService { + + private static final Logger logger = LoggerFactory.getLogger(ModuleServiceImpl.class); + + @Autowired + private ModuleMapper moduleMapper; + + @Override + @Transactional(rollbackFor = Exception.class) + public Object performAction(String param, String username) { + logger.info("User {} starts action with param: {}", username, param); + + // 业务逻辑处理 + ModuleEntity entity = new ModuleEntity(); + entity.setParam(param); + entity.setCreateUser(username); + + moduleMapper.insert(entity); + + logger.info("User {} completed action successfully", username); + return entity.getId(); + } +} +``` + +### 3. 数据访问层 +```java +@Mapper +public interface ModuleMapper { + + @Insert("INSERT INTO linkis_module_table (param, create_user, create_time) " + + "VALUES (#{param}, #{createUser}, NOW())") + @Options(useGeneratedKeys = true, keyProperty = "id") + void insert(ModuleEntity entity); + + @Select("SELECT * FROM linkis_module_table WHERE id = #{id}") + ModuleEntity selectById(@Param("id") Long id); +} +``` + +### 4. 
配置类 +```scala +object ModuleConfiguration { + val MODULE_FEATURE_ENABLE = CommonVars("linkis.module.feature.enable", false) + val MODULE_TIMEOUT = CommonVars("linkis.module.timeout", 30000L) + val MODULE_BATCH_SIZE = CommonVars("linkis.module.batch.size", 1000) +} +``` + +--- + +# 需求开发流程 + +## 需求分析模板 + +### 【背景说明】 +描述业务场景、现有问题或痛点、期望解决的目标 + +### 【验收标准】 +- 功能验收点(具体、可测量) +- 性能要求(响应时间、并发数等) +- 安全要求(权限控制、数据保护) +- 兼容性要求(向后兼容) + +## 开发交付清单 + +### 变更清单 +- 新增/修改的文件路径列表 +- 数据库变更脚本(DDL/DML) +- 配置文件变更 + +### 测试验证 +- 单元测试代码 +- 集成测试用例 +- 手动测试命令(curl等) + +### 质量检查 +- [ ] 代码符合项目规范 +- [ ] 异常处理完整 +- [ ] 日志记录充分 +- [ ] 单元测试覆盖 +- [ ] 配置开关完整 +- [ ] 向后兼容性检查 + +--- + +# AI IDE开发提示 + +## 开发技巧 +1. **优先查看现有代码**:在新增功能前,先查看相似功能的实现方式 +2. **遵循现有模式**:保持与现有代码风格一致 +3. **充分测试**:编写充分的单元测试和集成测试 +4. **考虑边界情况**:处理各种异常和边界条件 + +## 常见问题及解决方案 + +### 1. 字符编码问题 +**问题**:HTTP传输过程中出现中文乱码 +**解决**:统一使用`StandardCharsets.UTF_8` + +### 2. 配置热更新问题 +**问题**:配置修改后需要重启服务 +**解决**:使用`CommonVars`并配合`@RefreshScope`注解 + +### 3. 性能优化问题 +**问题**:大批量数据处理性能差 +**解决**:采用分页处理,单次处理不超过5000条 + +--- + +**📝 重要提示** +1. 严格遵循现有架构设计,不得随意修改核心组件 +2. 新增功能必须考虑向后兼容性 +3. 关键业务逻辑必须有完整的异常处理和日志记录 +4. 所有配置项必须有合理的默认值 +5. 
代码提交前必须通过本地测试验证 \ No newline at end of file diff --git a/docs/configuration/linkis-storage.md b/docs/configuration/linkis-storage.md index 45b07afc1d2..8e04b073be5 100644 --- a/docs/configuration/linkis-storage.md +++ b/docs/configuration/linkis-storage.md @@ -29,4 +29,4 @@ |linkis-storage|wds.linkis.fs.hdfs.impl.disable.cache| false |disable.cache |true| |linkis-storage|wds.linkis.hdfs.rest.errs| |rest.errs|true| |linkis-storage|wds.linkis.resultset.row.max.str | 2m | max.str |true| -|linkis-storage|wds.linkis.storage.file.type | dolphin,sql,scala,py,hql,python,out,log,text,sh,jdbc,ngql,psql,fql,tsql | file.type |true| +|linkis-storage|wds.linkis.storage.file.type | dolphin,sql,scala,py,hql,python,out,log,text,sh,jdbc,ngql,psql,fql,tsql,txt | file.type |true| diff --git a/docs/errorcode/linkis-engineconn-plugin-core-errorcode.md b/docs/errorcode/linkis-engineconn-plugin-core-errorcode.md index a929d062d24..0fc2521a76d 100644 --- a/docs/errorcode/linkis-engineconn-plugin-core-errorcode.md +++ b/docs/errorcode/linkis-engineconn-plugin-core-errorcode.md @@ -3,7 +3,7 @@ | module name(模块名) | error code(错误码) | describe(描述) |enumeration name(枚举)| Exception Class(类名)| | -------- | -------- | ----- |-----|-----| |linkis-engineconn-plugin-core |10001|Failed to createEngineConnLaunchRequest(创建 EngineConnLaunchRequest失败)|FAILED_CREATE_ELR|EngineconnCoreErrorCodeSummary| -|linkis-engineconn-plugin-core |10001|The engine plug-in material is abnormal, please check whether the material is uploaded successfully(引擎插件物料异常,请检查物料是否上传成功)|EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION|EngineconnCoreErrorCodeSummary| +|linkis-engineconn-plugin-core |10001|The engine plugin material is abnormal, please check whether the material is uploaded successfully(引擎插件物料异常,请检查物料是否上传成功)|EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION|EngineconnCoreErrorCodeSummary| |linkis-engineconn-plugin-core |10001|EngineTypeLabel are requested(需要参数 EngineTypeLabel)|ETL_REQUESTED|EngineconnCoreErrorCodeSummary| 
|linkis-engineconn-plugin-core |20000|Cannot instance EngineConnExecution(无法实例化 EngineConnExecution)|CANNOT_INSTANCE_ECE|EngineconnCoreErrorCodeSummary| |linkis-engineconn-plugin-core |20000|Cannot find default ExecutorFactory(找不到默认的 ExecutorFactory)|CANNOT_DEFAULT_EF|EngineconnCoreErrorCodeSummary| diff --git a/docs/errorcode/python-errorcode.md b/docs/errorcode/python-errorcode.md index 8a4f8434840..cf63aaa3fe8 100644 --- a/docs/errorcode/python-errorcode.md +++ b/docs/errorcode/python-errorcode.md @@ -1,7 +1,7 @@ ## python errorcode -| 模块名(服务名) | 错误码 | 描述 |enumeration name| Exception Class| -| -------- | -------- | ----- |-----|-----| -|python|41001| |PYTHON_EXECUTE_ERROR|LinkisPythonErrorCodeSummary| -|python|60003|Pyspark process has stopped, query failed!(Pyspark 进程已停止,查询失败!)|PYSPARK_PROCESSS_STOPPED|LinkisPythonErrorCodeSummary| -|python|400201|Invalid python session.(无效的 python 会话.)|INVALID_PYTHON_SESSION|LinkisPythonErrorCodeSummary| +| 模块名(服务名) | 错误码 | 描述 |enumeration name| Exception Class| +| -------- |--------| ----- |-----|-----| +|python| 60002 | |PYTHON_EXECUTE_ERROR|LinkisPythonErrorCodeSummary| +|python| 60003 |Pyspark process has stopped, query failed!(Pyspark 进程已停止,查询失败!)|PYSPARK_PROCESSS_STOPPED|LinkisPythonErrorCodeSummary| +|python| 400201 |Invalid python session.(无效的 python 会话.)|INVALID_PYTHON_SESSION|LinkisPythonErrorCodeSummary| diff --git a/docs/info-1.3.2.md b/docs/info-1.3.2.md index 55e5aeecbc7..0c690517fb3 100644 --- a/docs/info-1.3.2.md +++ b/docs/info-1.3.2.md @@ -4,4 +4,5 @@ |------------------| ----- |----------------------------------------------------------------------|------| ------------------------------------------------------- | | linkis-jobhistory | 新增 | wds.linkis.jobhistory.admin | hadoop |可以查看所有历史任务的用户 注意:wds.linkis.governance.station.admin 为管理用户(也具有可以查看所有历史任务的权限)| | linkis | 新增 | wds.linkis.governance.station.admin.token | /具有管理员权限的特殊token| +| linkis | 新增 | linkis.configuration.remove.application.cache | IDE 
|清除该应用的配置缓存| | cg-entrance | 新增 | linkis.entrance.auto.clean.dirty.data.enable | true |entrance重启调用ps-jobhistory接口是否开启,ture为开启,取值范围:true或false| diff --git a/linkis-commons/linkis-common/pom.xml b/linkis-commons/linkis-common/pom.xml index 425c0673d2c..1484def187d 100644 --- a/linkis-commons/linkis-common/pom.xml +++ b/linkis-commons/linkis-common/pom.xml @@ -124,6 +124,11 @@ 1.10 + + org.springframework.boot + spring-boot-actuator + + org.reflections reflections @@ -150,7 +155,7 @@ com.github.oshi oshi-core - 6.2.1 + 6.4.0 diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/ErrorException.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/ErrorException.java index fc361ee860a..d7ea54d2685 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/ErrorException.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/ErrorException.java @@ -17,7 +17,7 @@ package org.apache.linkis.common.exception; -public class ErrorException extends LinkisException { +public class ErrorException extends LinkisRuntimeException { private ExceptionLevel level = ExceptionLevel.ERROR; public ErrorException(int errCode, String desc) { diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/LinkisRetryException.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/LinkisRetryException.java index b9efe6cdf93..01a95db0f38 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/LinkisRetryException.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/exception/LinkisRetryException.java @@ -17,7 +17,7 @@ package org.apache.linkis.common.exception; -public class LinkisRetryException extends LinkisException { +public class LinkisRetryException extends LinkisRuntimeException { LinkisRetryException(int errCode, String desc, String ip, 
int port, String serviceKind) { super(errCode, desc, ip, port, serviceKind); } @@ -27,7 +27,7 @@ public LinkisRetryException(int errCode, String desc) { } @Override - ExceptionLevel getLevel() { + public ExceptionLevel getLevel() { return ExceptionLevel.RETRY; } } diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/io/Fs.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/io/Fs.java index e434bd72ee0..f4eaa6697be 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/io/Fs.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/io/Fs.java @@ -44,6 +44,8 @@ public interface Fs extends Closeable { boolean canRead(FsPath dest) throws IOException; + boolean canRead(FsPath dest, String user) throws IOException; + boolean canWrite(FsPath dest) throws IOException; boolean exists(FsPath dest) throws IOException; diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/AESUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/AESUtils.java new file mode 100644 index 00000000000..969cb8718eb --- /dev/null +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/AESUtils.java @@ -0,0 +1,175 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.common.utils; + +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.exception.ErrorException; + +import org.apache.commons.net.util.Base64; + +import javax.crypto.Cipher; +import javax.crypto.KeyGenerator; +import javax.crypto.SecretKey; +import javax.crypto.spec.SecretKeySpec; + +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; + +/** + * @author cr949 + * @description 字符串加密 生成xx位加密串 + */ +public class AESUtils { + + /** key 加密算法 */ + private static final String KEY_ALGORITHM = "AES"; + + /** 固定值 */ + private static final String SECRET_RANDOM = "SHA1PRNG"; + + /** 编码方式 */ + public static final String ENCODING_TYPE = "UTF-8"; + + /** 默认的加密算法 */ + private static final String DEFAULT_CIPHER_ALGORITHM = "AES/ECB/PKCS5Padding"; + + public static final String PASSWORD = "password"; + + public static final String IS_ENCRYPT = "isEncrypt"; + + public static final String DECRYPT = "0"; + + public static final String ENCRYPT = "1"; + + public static final CommonVars LINKIS_DATASOURCE_AES_KEY = + CommonVars.apply("linkis.datasource.aes.secretkey", ""); + + public static final CommonVars LINKIS_DATASOURCE_AES_SWITCH = + CommonVars.apply("linkis.datasource.aes.switch", false); + + /** + * 加密 + * + * @param content + * @param password + * @return + */ + public static String encrypt(String content, String password) { + try { + // 创建密码器 + Cipher cipher = Cipher.getInstance(DEFAULT_CIPHER_ALGORITHM); + + byte[] byteContent = content.getBytes(ENCODING_TYPE); + // 初始化为加密模式的密码器 + cipher.init(Cipher.ENCRYPT_MODE, getSecretKey(password)); + // 加密 + byte[] result = cipher.doFinal(byteContent); + // 通过Base64转码返回 + return Base64.encodeBase64String(result).trim(); + } catch (Exception e) { + throw new ErrorException(21304, "AES加密加密失败"); + } + } + + public static String encrypt(byte[] 
content, String password) { + try { + // 创建密码器 + Cipher cipher = Cipher.getInstance(DEFAULT_CIPHER_ALGORITHM); + // 初始化为加密模式的密码器 + cipher.init(Cipher.ENCRYPT_MODE, getSecretKey(password)); + // 加密 + byte[] result = cipher.doFinal(content); + // 通过Base64转码返回 + return Base64.encodeBase64String(result).trim(); + } catch (Exception e) { + throw new ErrorException(21304, "AES加密加密失败"); + } + } + + /** + * AES 解密操作 + * + * @param content + * @param password + * @return + */ + public static String decrypt(String content, String password) { + try { + // 实例化 + Cipher cipher = Cipher.getInstance(DEFAULT_CIPHER_ALGORITHM); + // 使用密钥初始化,设置为解密模式 + cipher.init(Cipher.DECRYPT_MODE, getSecretKey(password)); + // 执行操作 + byte[] result = cipher.doFinal(Base64.decodeBase64(content)); + return new String(result, ENCODING_TYPE); + } catch (Exception e) { + throw new ErrorException(21304, "AES加密解密失败"); + } + } + + /** + * AES 解密操作 + * + * @param content + * @param password + * @return + */ + public static byte[] decrypt(byte[] content, String password) { + try { + // 实例化 + Cipher cipher = Cipher.getInstance(DEFAULT_CIPHER_ALGORITHM); + // 使用密钥初始化,设置为解密模式 + cipher.init(Cipher.DECRYPT_MODE, getSecretKey(password)); + // 执行操作 + return cipher.doFinal(Base64.decodeBase64(content)); + } catch (Exception e) { + throw new ErrorException(21304, "AES加密解密失败"); + } + } + + /** + * 生成加密秘钥 + * + * @return + */ + private static SecretKeySpec getSecretKey(String password) { + // 返回生成指定算法密钥生成器的 KeyGenerator 对象 + KeyGenerator kg; + try { + kg = KeyGenerator.getInstance(KEY_ALGORITHM); + SecureRandom secureRandom = SecureRandom.getInstance(SECRET_RANDOM); + secureRandom.setSeed(password.getBytes()); + // AES 要求密钥长度为 128 + kg.init(128, secureRandom); + // 生成一个密钥 + SecretKey secretKey = kg.generateKey(); + // 转换为AES专用密钥 + return new SecretKeySpec(secretKey.getEncoded(), KEY_ALGORITHM); + } catch (NoSuchAlgorithmException e) { + throw new ErrorException(21304, "AES生成加密秘钥失败"); + } + } + + public static String 
isDecryptByConf(String password) { + if (AESUtils.LINKIS_DATASOURCE_AES_SWITCH.getValue()) { + // decrypt + password = AESUtils.decrypt(password, AESUtils.LINKIS_DATASOURCE_AES_KEY.getValue()); + } + return password; + } +} diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java index d23f4a0867d..e81da47e693 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java @@ -213,7 +213,6 @@ private static long parseByteString(String str, ByteUnit unit) { } else { throw new NumberFormatException("Failed to parse byte string: " + str); } - suffix = suffix.toLowerCase(); // Check for invalid suffixes if (suffix != null && !byteSuffixes.containsKey(suffix)) { throw new NumberFormatException("Invalid suffix: \"" + suffix + "\""); @@ -297,6 +296,18 @@ public static long byteStringAsGb(String str) { return parseByteString(str, ByteUnit.GiB); } + /** + * Convert a passed byte string (e.g. -50b, -100k, or -250m) to gibibytes for internal use. + * + *

If no suffix is provided, the passed number is assumed to be in gibibytes. + */ + public static long negativeByteStringAsGb(String str) { + if (str.startsWith("-")) { + return Math.negateExact(parseByteString(str.substring(1), ByteUnit.GiB)); + } + return parseByteString(str, ByteUnit.GiB); + } + /** * Returns a byte array with the buffer's contents, trying to avoid copying the data if possible. */ @@ -354,7 +365,7 @@ public double toBytes(long d) { if (d < 0) { throw new IllegalArgumentException("Negative size value. Size must be positive: " + d); } - return d * multiplier; + return (double) d * multiplier; } public long toKiB(long d) { diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/LinkisUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/LinkisUtils.java new file mode 100644 index 00000000000..353f80f1da8 --- /dev/null +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/LinkisUtils.java @@ -0,0 +1,165 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.common.utils; + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.FatalException; +import org.apache.linkis.common.exception.WarnException; + +import java.util.concurrent.Callable; +import java.util.function.Function; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LinkisUtils { + private static final Logger logger = LoggerFactory.getLogger(LinkisUtils.class); + + public static T tryCatch(Callable tryOp, Function catchOp) { + T result = null; + try { + result = tryOp.call(); + } catch (Throwable t) { + if (t instanceof FatalException) { + logger.error("Fatal error, system exit...", t); + System.exit(((FatalException) t).getErrCode()); + } else if (t instanceof VirtualMachineError) { + logger.error("Fatal error, system exit...", t); + System.exit(-1); + } else if (null != t.getCause() + && (t.getCause() instanceof FatalException + || t.getCause() instanceof VirtualMachineError)) { + logger.error("Caused by fatal error, system exit...", t); + System.exit(-1); + } else if (t instanceof Error) { + logger.error("Throw error", t); + throw (Error) t; + } else { + result = catchOp.apply(t); + } + } + return result; + } + + public static void tryFinally(Runnable tryOp, Runnable finallyOp) { + try { + tryOp.run(); + } finally { + finallyOp.run(); + } + } + + public static T tryAndWarn(Callable tryOp, Logger log) { + return tryCatch( + tryOp, + t -> { + if (t instanceof ErrorException) { + ErrorException error = (ErrorException) t; + log.error( + "Warning code(警告码): {}, Warning message(警告信息): {}.", + error.getErrCode(), + error.getDesc(), + error); + + } else if (t instanceof WarnException) { + WarnException warn = (WarnException) t; + log.warn( + "Warning code(警告码): {}, Warning message(警告信息): {}.", + warn.getErrCode(), + warn.getDesc(), + warn); + + } else { + log.warn("", t); + } + return null; + }); + } + + public static void tryAndErrorMsg(Runnable tryOp, 
String message, Logger log) { + try { + tryOp.run(); + } catch (WarnException t) { + WarnException warn = (WarnException) t; + log.warn( + "Warning code(警告码): {}, Warning message(警告信息): {}.", warn.getErrCode(), warn.getDesc()); + log.warn(message, warn); + } catch (Exception t) { + log.warn(message, t); + } + } + + public static void tryAndWarn(Runnable tryOp, Logger log) { + try { + tryOp.run(); + } catch (Throwable error) { + if (error instanceof WarnException) { + WarnException warn = (WarnException) error; + log.warn( + "Warning code(警告码): {}, Warning message(警告信息): {}.", + warn.getErrCode(), + warn.getDesc(), + error); + } else { + log.warn("", error); + } + } + } + + public static void tryAndWarnMsg(Runnable tryOp, String message, Logger log) { + try { + tryOp.run(); + } catch (WarnException t) { + WarnException warn = (WarnException) t; + log.warn( + "Warning code(警告码): {}, Warning message(警告信息): {}.", warn.getErrCode(), warn.getDesc()); + log.warn(message, warn); + } catch (Exception t) { + log.warn(message, t); + } + } + + public static T tryAndWarnMsg(Callable tryOp, String message, Logger log) { + return tryCatch( + tryOp, + t -> { + if (t instanceof ErrorException) { + ErrorException error = (ErrorException) t; + log.warn( + "Warning code(警告码): {}, Warning message(警告信息): {}.", + error.getErrCode(), + error.getDesc()); + log.warn(message, error); + } else if (t instanceof WarnException) { + WarnException warn = (WarnException) t; + log.warn( + "Warning code(警告码): {}, Warning message(警告信息): {}.", + warn.getErrCode(), + warn.getDesc()); + log.warn(message, warn); + } else { + log.warn(message, t); + } + return null; + }); + } + + public static String getJvmUser() { + return System.getProperty("user.name"); + } +} diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/MD5Utils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/MD5Utils.java new file mode 100644 index 00000000000..1291b8bb68b --- 
/dev/null +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/MD5Utils.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.common.utils; + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +public class MD5Utils { + + /** + * @param plaintext + * @return + * @throws NoSuchAlgorithmException + */ + public static String encrypt(String plaintext) throws NoSuchAlgorithmException { + // 使用 MD5 算法创建 MessageDigest 对象 + MessageDigest md = MessageDigest.getInstance("MD5"); + // 更新 MessageDigest 对象中的字节数据 + md.update(plaintext.getBytes()); + // 对更新后的数据计算哈希值,存储在 byte 数组中 + byte[] digest = md.digest(); + // 将 byte 数组转换为十六进制字符串 + StringBuilder sb = new StringBuilder(); + for (byte b : digest) { + sb.append(String.format("%02x", b & 0xff)); + } + // 返回十六进制字符串 + return sb.toString(); + } +} diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ResultSetUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ResultSetUtils.java new file mode 100644 index 00000000000..a367b38b80b --- /dev/null +++ 
b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ResultSetUtils.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.common.utils; + +import org.apache.linkis.common.io.FsPath; + +import java.io.File; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class ResultSetUtils { + + // Sort in ASC order by numx in the result set _numx.dolphin file name + public static Comparator getResultSetFileComparatorOrderByNameNum() { + + Comparator comparator = + (o1, o2) -> { + // get the num of file name + String regx = "\\d+"; + + String[] res1 = o1.getPath().split(File.separator); + String fileName1 = res1[res1.length - 1]; + Matcher matcher1 = Pattern.compile(regx).matcher(fileName1); + int num1 = matcher1.find() ? Integer.parseInt(matcher1.group()) : Integer.MAX_VALUE; + + String[] res2 = o2.getPath().split(File.separator); + String fileName2 = res2[res2.length - 1]; + Matcher matcher2 = Pattern.compile(regx).matcher(fileName2); + int num2 = matcher2.find() ? 
Integer.parseInt(matcher2.group()) : Integer.MAX_VALUE; + + return num1 - num2; + }; + return comparator; + } + + public static void sortByNameNum(List fsPathList) { + Collections.sort(fsPathList, getResultSetFileComparatorOrderByNameNum()); + } +} diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SHAUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SHAUtils.java new file mode 100644 index 00000000000..b3fe61a1c93 --- /dev/null +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SHAUtils.java @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.common.utils; + +import org.apache.commons.lang3.StringUtils; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.Map; + +public class SHAUtils { + + /** + * 对字符串加密,默认使用SHA-256 + * + * @param strSrc 要加密的字符串 + * @param encName 加密类型 + * @return + * @throws UnsupportedEncodingException + */ + public static String Encrypt(String strSrc, String encName) throws UnsupportedEncodingException { + MessageDigest md = null; + String strDes = null; + byte[] bt = strSrc.getBytes("utf-8"); + try { + if (encName == null || encName.equals("")) { + encName = "SHA-256"; + } + md = MessageDigest.getInstance(encName); + md.update(bt); + strDes = bytes2Hex(md.digest()); // to HexString + } catch (NoSuchAlgorithmException e) { + return null; + } + return strDes; + } + + public static String bytes2Hex(byte[] bts) { + String des = ""; + String tmp = null; + for (int i = 0; i < bts.length; i++) { + tmp = (Integer.toHexString(bts[i] & 0xFF)); + if (tmp.length() == 1) { + des += "0"; + } + des += tmp; + } + return des; + } + + public static void main(String[] args) throws IOException { + String applicationId = args[0]; + String app_id = args[1]; + String token = args[2]; + String nonce = args[3]; + if (StringUtils.isBlank(applicationId)) { + throw new LinkageError("Invalid applicationId cannot be empty"); + } + if (StringUtils.isBlank(app_id)) { + throw new LinkageError("Invalid app_id cannot be empty"); + } + if (StringUtils.isBlank(token)) { + throw new LinkageError("Invalid token cannot be empty"); + } + if (StringUtils.isBlank(nonce)) { + throw new LinkageError("Invalid nonce cannot be empty"); + } + Map parms = new HashMap<>(); + String timestampStr = String.valueOf(System.currentTimeMillis()); + parms.put("applicationId", applicationId); + parms.put("app_id", app_id); + parms.put("timestamp", 
timestampStr); + parms.put("nonce", nonce); + if (StringUtils.isNotBlank(token)) { + String signature = + Encrypt(Encrypt(parms.get("app_id") + nonce + timestampStr, null) + token, null); + parms.put("signature", signature); + } + System.out.println(parms); + } +} diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SecurityUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SecurityUtils.java index f7158b4899b..af163a64948 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SecurityUtils.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/SecurityUtils.java @@ -25,10 +25,9 @@ import java.io.UnsupportedEncodingException; import java.net.URLDecoder; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.Map; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import java.util.stream.Collectors; import org.slf4j.Logger; @@ -46,8 +45,12 @@ public abstract class SecurityUtils { private static final String QUESTION_MARK = "?"; + private static final String REGEX_QUESTION_MARK = "\\?"; + + private static final int JDBC_URL_ITEM_COUNT = 2; + /** allowLoadLocalInfile,allowLoadLocalInfiled,# */ - public static final CommonVars MYSQL_SENSITIVE_PARAMS = + private static final CommonVars MYSQL_SENSITIVE_PARAMS = CommonVars$.MODULE$.apply( "linkis.mysql.sensitive.params", "allowLoadLocalInfile,autoDeserialize,allowLocalInfile,allowUrlInLocalInfile,#"); @@ -55,16 +58,129 @@ public abstract class SecurityUtils { /** * "allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false" */ - public static final CommonVars MYSQL_FORCE_PARAMS = + private static final CommonVars MYSQL_FORCE_PARAMS = CommonVars$.MODULE$.apply( "linkis.mysql.force.params", 
"allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false"); - public static final CommonVars MYSQL_STRONG_SECURITY_ENABLE = + private static final CommonVars MYSQL_STRONG_SECURITY_ENABLE = CommonVars$.MODULE$.apply("linkis.mysql.strong.security.enable", "false"); + private static final CommonVars MYSQL_SECURITY_CHECK_ENABLE = + CommonVars$.MODULE$.apply("linkis.mysql.security.check.enable", "true"); + + private static final CommonVars MYSQL_CONNECT_URL = + CommonVars.apply("linkis.security.mysql.url.template", "jdbc:mysql://%s:%s/%s"); + + private static final CommonVars JDBC_MATCH_REGEX = + CommonVars$.MODULE$.apply( + "linkis.mysql.jdbc.match.regex", + "(?i)jdbc:(?i)(mysql)://([^:]+)(:[0-9]+)?(/[a-zA-Z0-9_-]*[\\.\\-]?)?"); + + private static final String JDBC_MYSQL_PROTOCOL = "jdbc:mysql"; + /** - * mysql url append force params + * check mysql connection params + * + * @param host + * @param port + * @param username + * @param password + * @param database + * @param extraParams + */ + public static void checkJdbcConnParams( + String host, + Integer port, + String username, + String password, + String database, + Map extraParams) { + + // check switch + if (!Boolean.valueOf(MYSQL_SECURITY_CHECK_ENABLE.getValue())) { + return; + } + + // 1. Check blank params + if (StringUtils.isAnyBlank(host, username)) { + logger.error( + "Invalid mysql connection params: host: {}, username: {}, database: {}", + host, + username, + database); + throw new LinkisSecurityException(35000, "Invalid mysql connection params."); + } + + // 2. Check url format + String url = String.format(MYSQL_CONNECT_URL.getValue(), host.trim(), port, database.trim()); + checkUrl(url); + + // 3. Check params. Mainly vulnerability parameters. 
Note the url encoding + checkParams(extraParams); + } + + /** @param url */ + public static void checkJdbcConnUrl(String url) { + + // check switch + if (!Boolean.valueOf(MYSQL_SECURITY_CHECK_ENABLE.getValue())) { + return; + } + + logger.info("jdbc url: {}", url); + if (StringUtils.isBlank(url)) { + throw new LinkisSecurityException(35000, "Invalid jdbc connection url."); + } + + // temporarily only check mysql jdbc url. + if (!url.toLowerCase().startsWith(JDBC_MYSQL_PROTOCOL)) { + return; + } + + String[] urlItems = url.split(REGEX_QUESTION_MARK); + if (urlItems.length > JDBC_URL_ITEM_COUNT) { + throw new LinkisSecurityException(35000, "Invalid jdbc connection url."); + } + + // check url + checkUrl(urlItems[0]); + + // check params + if (urlItems.length == JDBC_URL_ITEM_COUNT) { + Map params = parseMysqlUrlParamsToMap(urlItems[1]); + checkParams(params); + } + } + + /** + * call after checkJdbcConnUrl + * + * @param url + * @return + */ + public static String getJdbcUrl(String url) { + // preventing NPE + if (StringUtils.isBlank(url)) { + return url; + } + // temporarily deal with only mysql jdbc url. 
+ if (!url.toLowerCase().startsWith(JDBC_MYSQL_PROTOCOL)) { + return url; + } + String[] items = url.split(REGEX_QUESTION_MARK); + String result = items[0]; + if (items.length == JDBC_URL_ITEM_COUNT) { + Map params = parseMysqlUrlParamsToMap(items[1]); + appendMysqlForceParams(params); + String paramUrl = parseParamsMapToMysqlParamUrl(params); + result += QUESTION_MARK + paramUrl; + } + return result; + } + + /** + * append force params, Should be called after the checkJdbcConnParams method * * @param url * @return @@ -73,6 +189,9 @@ public static String appendMysqlForceParams(String url) { if (StringUtils.isBlank(url)) { return ""; } + if (!Boolean.valueOf(MYSQL_STRONG_SECURITY_ENABLE.getValue())) { + return url; + } String extraParamString = MYSQL_FORCE_PARAMS.getValue(); @@ -86,36 +205,41 @@ public static String appendMysqlForceParams(String url) { return url; } + /** + * append force params, Should be called after the checkJdbcConnParams method + * + * @param extraParams + */ public static void appendMysqlForceParams(Map extraParams) { - extraParams.putAll(parseMysqlUrlParamsToMap(MYSQL_FORCE_PARAMS.getValue())); + if (Boolean.valueOf(MYSQL_STRONG_SECURITY_ENABLE.getValue())) { + extraParams.putAll(parseMysqlUrlParamsToMap(MYSQL_FORCE_PARAMS.getValue())); + } } - public static String checkJdbcSecurity(String url) { - logger.info("checkJdbcSecurity origin url: {}", url); - if (StringUtils.isBlank(url)) { - throw new LinkisSecurityException(35000, "Invalid mysql connection cul, url is empty"); - } - // deal with url encode - try { - url = URLDecoder.decode(url, "UTF-8"); - } catch (UnsupportedEncodingException e) { - throw new LinkisSecurityException(35000, "mysql connection cul decode error: " + e); + public static String parseParamsMapToMysqlParamUrl(Map params) { + if (params == null || params.isEmpty()) { + return ""; } - if (url.endsWith(QUESTION_MARK) || !url.contains(QUESTION_MARK)) { - logger.info("checkJdbcSecurity target url: {}", url); - return url; + 
return params.entrySet().stream() + .map(e -> String.join(EQUAL_SIGN, e.getKey(), String.valueOf(e.getValue()))) + .collect(Collectors.joining(AND_SYMBOL)); + } + + /** + * check url, format: jdbc:mysql://host:port/dbname + * + * @param url + */ + public static void checkUrl(String url) { + if (url != null && !url.toLowerCase().startsWith(JDBC_MYSQL_PROTOCOL)) { + return; } - String[] items = url.split("\\?"); - if (items.length != 2) { - logger.warn("Invalid url: {}", url); - throw new LinkisSecurityException(35000, "Invalid mysql connection cul: " + url); + Pattern regex = Pattern.compile(JDBC_MATCH_REGEX.getValue()); + Matcher matcher = regex.matcher(url); + if (!matcher.matches()) { + logger.info("Invalid mysql connection url: {}", url); + throw new LinkisSecurityException(35000, "Invalid mysql connection url."); } - Map params = parseMysqlUrlParamsToMap(items[1]); - Map securityMap = checkJdbcSecurity(params); - String paramUrl = parseParamsMapToMysqlParamUrl(securityMap); - url = items[0] + QUESTION_MARK + paramUrl; - logger.info("checkJdbcSecurity target url: {}", url); - return url; } /** @@ -123,15 +247,9 @@ public static String checkJdbcSecurity(String url) { * * @param paramsMap */ - public static Map checkJdbcSecurity(Map paramsMap) { - if (paramsMap == null) { - return new HashMap<>(); - } - - // mysql url strong security - if (Boolean.valueOf(MYSQL_STRONG_SECURITY_ENABLE.getValue())) { - paramsMap.clear(); - return paramsMap; + private static void checkParams(Map paramsMap) { + if (paramsMap == null || paramsMap.isEmpty()) { + return; } // deal with url encode @@ -163,19 +281,12 @@ public static Map checkJdbcSecurity(Map paramsMa "Invalid mysql connection parameters: " + parseParamsMapToMysqlParamUrl(paramsMap)); } } - return paramsMap; - } - - public static String parseParamsMapToMysqlParamUrl(Map forceParams) { - if (forceParams == null) { - return ""; - } - return forceParams.entrySet().stream() - .map(e -> String.join(EQUAL_SIGN, e.getKey(), 
String.valueOf(e.getValue()))) - .collect(Collectors.joining(AND_SYMBOL)); } private static Map parseMysqlUrlParamsToMap(String paramsUrl) { + if (StringUtils.isBlank(paramsUrl)) { + return new LinkedHashMap<>(); + } String[] params = paramsUrl.split(AND_SYMBOL); Map map = new LinkedHashMap<>(params.length); for (String param : params) { @@ -209,4 +320,40 @@ private static boolean isNotSecurity(String key, String value, String param) { return key.toLowerCase().contains(param.toLowerCase()) || value.toLowerCase().contains(param.toLowerCase()); } + + /** + * allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false + * + * @return + */ + public static Properties getMysqlSecurityParams() { + Properties properties = new Properties(); + properties.setProperty("allowLoadLocalInfile", "false"); + properties.setProperty("autoDeserialize", "false"); + properties.setProperty("allowLocalInfile", "false"); + properties.setProperty("allowUrlInLocalInfile", "false"); + return properties; + } + + /** + * Check if the path has a relative path + * + * @param path + * @return + */ + public static boolean containsRelativePath(String path) { + if (path.startsWith("./") + || path.contains("/./") + || path.startsWith("../") + || path.contains("/../")) { + return true; + } + if (path.startsWith(".\\") + || path.contains("\\.\\") + || path.startsWith("..\\") + || path.contains("\\..\\")) { + return true; + } + return false; + } } diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java index d71f8b40e63..d1cb59c397b 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java @@ -17,18 +17,19 @@ package 
org.apache.linkis.common.utils; +import org.apache.linkis.common.conf.Configuration; import org.apache.linkis.common.exception.VariableOperationFailedException; -import java.time.Instant; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.time.ZonedDateTime; +import java.time.*; import java.time.format.DateTimeFormatter; import java.util.Date; import java.util.Iterator; import java.util.Map; +import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.json.JsonMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -49,6 +50,9 @@ public class VariableOperationUtils { private static final String[] CYCLES = new String[] {CYCLE_YEAR, CYCLE_MONTH, CYCLE_DAY, CYCLE_HOUR, CYCLE_MINUTE, CYCLE_SECOND}; + private static final ObjectMapper mapper = + JsonMapper.builder().enable(DeserializationFeature.FAIL_ON_TRAILING_TOKENS).build(); + /** * yyyy-MM-dd HH:mm:ss * @@ -56,9 +60,16 @@ public class VariableOperationUtils { * @return */ public static ZonedDateTime toZonedDateTime(Date date, ZoneId zoneId) { - Instant instant = date.toInstant(); - LocalDateTime localDateTime = instant.atZone(zoneId).toLocalDateTime(); - return ZonedDateTime.of(localDateTime, zoneId); + if (Configuration.VARIABLE_OPERATION_USE_NOW()) { + LocalTime currentTime = LocalTime.now(); + LocalDate localDate = date.toInstant().atZone(zoneId).toLocalDate(); + LocalDateTime localDateTime = LocalDateTime.of(localDate, currentTime); + return ZonedDateTime.of(localDateTime, zoneId); + } else { + Instant instant = date.toInstant(); + LocalDateTime localDateTime = instant.atZone(zoneId).toLocalDateTime(); + return ZonedDateTime.of(localDateTime, zoneId); + } } /** @@ -78,30 +89,44 @@ public static ZonedDateTime toZonedDateTime(Date date) { * @param str * @return */ + @Deprecated public 
static String replaces(ZonedDateTime dateTime, String str) throws VariableOperationFailedException { - return replaces(dateTime, str, true); + try { + JsonNode rootNode = mapper.readTree(str); + if (rootNode.isArray() || rootNode.isObject()) { + replaceJson(dateTime, rootNode); + return rootNode.toString(); + } + } catch (Exception e) { + return replace(dateTime, str); + } + return replace(dateTime, str); } /** * json support variable operation * + * @param codeType * @param dateTime * @param str - * @param format * @return */ - public static String replaces(ZonedDateTime dateTime, String str, boolean format) + public static String replaces(String codeType, ZonedDateTime dateTime, String str) throws VariableOperationFailedException { - try { - JsonNode rootNode = JsonUtils.jackson().readTree(str); - if (rootNode.isArray() || rootNode.isObject()) { - replaceJson(dateTime, rootNode); - return rootNode.toString(); + String languageType = CodeAndRunTypeUtils.getLanguageTypeByCodeType(codeType, ""); + if (languageType.equals(CodeAndRunTypeUtils.LANGUAGE_TYPE_JSON())) { + try { + JsonNode rootNode = mapper.readTree(str); + if (rootNode.isArray() || rootNode.isObject()) { + replaceJson(dateTime, rootNode); + return rootNode.toString(); + } + } catch (Exception e) { + return replace(dateTime, str); } - } catch (Exception e) { - return replace(dateTime, str); } + return replace(dateTime, str); } @@ -197,7 +222,7 @@ private static void replaceJson(ZonedDateTime dateTime, JsonNode object) } else if (temp.isObject()) { replaceJson(dateTime, temp); } else { - arrayNode.insert(i, replace(dateTime, temp.toString())); + arrayNode.set(i, replace(dateTime, temp.toString())); } } } else if (object.isObject()) { diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala index 55535e5336c..9bfa053b77b 100644 --- 
a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala @@ -22,7 +22,7 @@ import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils -import java.io.{File, FileInputStream, InputStream, IOException} +import java.io._ import java.util.Properties import java.util.concurrent.TimeUnit import java.util.concurrent.locks.ReentrantReadWriteLock @@ -140,15 +140,20 @@ private[conf] object BDPConfiguration extends Logging { private def initConfig(config: Properties, filePath: String) { var inputStream: InputStream = null - + var reader: InputStreamReader = null + var buff: BufferedReader = null Utils.tryFinally { Utils.tryCatch { inputStream = new FileInputStream(filePath) - config.load(inputStream) + reader = new InputStreamReader(inputStream, "UTF-8") + buff = new BufferedReader(reader) + config.load(buff) } { case e: IOException => logger.error("Can't load " + filePath, e) } } { + IOUtils.closeQuietly(buff) + IOUtils.closeQuietly(reader) IOUtils.closeQuietly(inputStream) } } @@ -227,19 +232,20 @@ private[conf] object BDPConfiguration extends Logging { private[common] def formatValue[T](defaultValue: T, value: Option[String]): Option[T] = { if (value.isEmpty || value.exists(StringUtils.isEmpty)) return Option(defaultValue) + val trimValue = value.map(_.trim) val formattedValue = defaultValue match { - case _: String => value - case _: Byte => value.map(_.toByte) - case _: Short => value.map(_.toShort) - case _: Char => value.map(_.toCharArray.apply(0)) - case _: Int => value.map(_.toInt) - case _: Long => value.map(_.toLong) - case _: Float => value.map(_.toFloat) - case _: Double => value.map(_.toDouble) - case _: Boolean => value.map(_.toBoolean) - case _: TimeType => value.map(new TimeType(_)) - case _: ByteType => value.map(new ByteType(_)) - case 
null => value + case _: String => trimValue + case _: Byte => trimValue.map(_.toByte) + case _: Short => trimValue.map(_.toShort) + case _: Char => trimValue.map(_.toCharArray.apply(0)) + case _: Int => trimValue.map(_.toInt) + case _: Long => trimValue.map(_.toLong) + case _: Float => trimValue.map(_.toFloat) + case _: Double => trimValue.map(_.toDouble) + case _: Boolean => trimValue.map(_.toBoolean) + case _: TimeType => trimValue.map(new TimeType(_)) + case _: ByteType => trimValue.map(new ByteType(_)) + case null => trimValue } formattedValue.asInstanceOf[Option[T]] } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala index 8ac94739c46..dd4570d95b1 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala @@ -17,7 +17,7 @@ package org.apache.linkis.common.conf -import org.apache.linkis.common.utils.Logging +import org.apache.linkis.common.utils.{Logging, RSAUtils} import org.apache.commons.lang3.StringUtils @@ -31,8 +31,12 @@ object Configuration extends Logging { val IS_TEST_MODE = CommonVars("wds.linkis.test.mode", false) + val LINKIS_SYS_NAME = CommonVars("linkis.system.name", "") + val IS_PROMETHEUS_ENABLE = CommonVars("wds.linkis.prometheus.enable", false) + val IS_MULTIPLE_YARN_CLUSTER = CommonVars("linkis.multiple.yarn.cluster", false).getValue + val PROMETHEUS_ENDPOINT = CommonVars("wds.linkis.prometheus.endpoint", "/actuator/prometheus") val LINKIS_HOME = CommonVars("wds.linkis.home", CommonVars("LINKIS_HOME", "/tmp").getValue) @@ -53,6 +57,9 @@ object Configuration extends Logging { val CLOUD_CONSOLE_VARIABLE_SPRING_APPLICATION_NAME = CommonVars("wds.linkis.console.variable.application.name", "linkis-ps-publicservice") + val 
JOBHISTORY_SPRING_APPLICATION_NAME = + CommonVars("wds.linkis.jobhistory.application.name", "linkis-ps-jobhistory") + // read from env val PREFER_IP_ADDRESS: Boolean = CommonVars( "linkis.discovery.prefer-ip-address", @@ -63,10 +70,24 @@ object Configuration extends Logging { val JOB_HISTORY_ADMIN = CommonVars("wds.linkis.jobhistory.admin", "hadoop") + val JOB_HISTORY_DEPARTMENT_ADMIN = CommonVars("wds.linkis.jobhistory.department.admin", "hadoop") + + val JOB_RESULT_DEPARTMENT_LIMIT = + CommonVars("linkis.jobhistory.result.limit.department", "") + // Only the specified token has permission to call some api val GOVERNANCE_STATION_ADMIN_TOKEN_STARTWITH = "ADMIN-" - val VARIABLE_OPERATION: Boolean = CommonVars("wds.linkis.variable.operation", false).getValue + val VARIABLE_OPERATION_USE_NOW: Boolean = + CommonVars("wds.linkis.variable.operation.use.now", true).getValue + + val IS_VIEW_FS_ENV = CommonVars("wds.linkis.env.is.viewfs", true) + + val LINKIS_RSA_TOKEN_SWITCH = CommonVars("linkis.rsa.token.switch", false).getValue + + val LINKIS_RSA_PUBLIC_KEY = CommonVars("linkis.rsa.public.key", "") + + val LINKIS_RSA_PRIVATE_KEY = CommonVars("linkis.rsa.private.key", "") val ERROR_MSG_TIP = CommonVars( @@ -74,11 +95,37 @@ object Configuration extends Logging { "The request interface %s is abnormal. 
You can try to troubleshoot common problems in the knowledge base document" ) + val LINKIS_TOKEN = CommonVars("wds.linkis.token", "") + + val HDFS_HOUR_DIR_SWITCH = CommonVars("linkis.hdfs.hour.dir.switch", false).getValue + + val LINKIS_KEYTAB_SWITCH: Boolean = CommonVars("linkis.keytab.switch", false).getValue + + val METRICS_INCREMENTAL_UPDATE_ENABLE = + CommonVars[Boolean]("linkis.jobhistory.metrics.incremental.update.enable", false) + + val GLOBAL_CONF_CHN_NAME = "全局设置" + + val GLOBAL_CONF_CHN_OLDNAME = "通用设置" + + val GLOBAL_CONF_CHN_EN_NAME = "GlobalSettings" + + val GLOBAL_CONF_SYMBOL = "*" + + val GLOBAL_CONF_LABEL = "*-*,*-*" + def isAdminToken(token: String): Boolean = { if (StringUtils.isBlank(token)) { false } else { - token.toUpperCase().startsWith(GOVERNANCE_STATION_ADMIN_TOKEN_STARTWITH) + if (Configuration.LINKIS_RSA_TOKEN_SWITCH && token.startsWith(RSAUtils.PREFIX)) { + RSAUtils + .dncryptWithLinkisPublicKey(token) + .toUpperCase() + .contains(GOVERNANCE_STATION_ADMIN_TOKEN_STARTWITH) + } else { + token.toUpperCase().contains(GOVERNANCE_STATION_ADMIN_TOKEN_STARTWITH) + } } } @@ -122,10 +169,27 @@ object Configuration extends Logging { .exists(username.equalsIgnoreCase) } + def isDepartmentAdmin(username: String): Boolean = { + val departmentAdminUsers = JOB_HISTORY_DEPARTMENT_ADMIN.getHotValue.split(",") + departmentAdminUsers.exists(username.equalsIgnoreCase) + } + def getJobHistoryAdmin(): Array[String] = { val adminUsers = GOVERNANCE_STATION_ADMIN.getHotValue.split(",") val historyAdminUsers = JOB_HISTORY_ADMIN.getHotValue.split(",") (adminUsers ++ historyAdminUsers).distinct } + def getGlobalCreator(creator: String): String = creator match { + case Configuration.GLOBAL_CONF_CHN_NAME | Configuration.GLOBAL_CONF_CHN_OLDNAME | + Configuration.GLOBAL_CONF_CHN_EN_NAME => + GLOBAL_CONF_SYMBOL + case _ => creator + } + + def canResultSetByDepartment(departmentId: String): Boolean = { + val jobResultLimit = 
JOB_RESULT_DEPARTMENT_LIMIT.getHotValue.split(",") + !jobResultLimit.exists(departmentId.equalsIgnoreCase) + } + } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala index 77c82f38838..e558e765bed 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala @@ -33,7 +33,7 @@ object LogUtils { } def generateERROR(rawLog: String): String = { - getTimeFormat + " " + "ERROR" + " " + rawLog + getTimeFormat + " " + ERROR_STR + " " + rawLog } def generateWarn(rawLog: String): String = { @@ -52,4 +52,6 @@ object LogUtils { getTimeFormat + " " + "SYSTEM-WARN" + " " + rawLog } + val ERROR_STR = "ERROR" + } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala index 3870fe6e584..9bbd3201186 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala @@ -21,6 +21,10 @@ import org.apache.linkis.common.conf.CommonVars import org.apache.commons.lang3.StringUtils +import java.util.Locale + +import scala.collection.mutable + object CodeAndRunTypeUtils { private val CONF_LOCK = new Object() @@ -29,7 +33,7 @@ object CodeAndRunTypeUtils { */ val CODE_TYPE_AND_RUN_TYPE_RELATION = CommonVars( "linkis.codeType.language.relation", - "sql=>sql|hql|jdbc|hive|psql|fql|tsql,python=>python|py|pyspark,java=>java,scala=>scala,shell=>sh|shell,json=>json|data_calc" + 
"sql=>sql|hql|jdbc|hive|psql|fql|tsql|nebula|ngql|aisql|starrocks,python=>python|py|pyspark|py3,java=>java,scala=>scala,shell=>sh|shell,json=>json|data_calc" ) val LANGUAGE_TYPE_SQL = "sql" @@ -44,6 +48,8 @@ object CodeAndRunTypeUtils { val LANGUAGE_TYPE_JSON = "json" + val LANGUAGE_TYPE_AI_SQL = "aisql" + private var codeTypeAndLanguageTypeRelationMap: Map[String, List[String]] = null /** @@ -101,14 +107,23 @@ object CodeAndRunTypeUtils { def getLanguageTypeAndCodeTypeRelationMap: Map[String, String] = { val codeTypeAndRunTypeRelationMap = getCodeTypeAndLanguageTypeRelationMap if (codeTypeAndRunTypeRelationMap.isEmpty) Map() - else codeTypeAndRunTypeRelationMap.flatMap(x => x._2.map(y => (y, x._1))) + else { +// codeTypeAndRunTypeRelationMap.flatMap(x => x._2.map(y => (y, x._1))) + val map = mutable.Map[String, String]() + codeTypeAndRunTypeRelationMap.foreach(kv => { + kv._2.foreach(v => map.put(v, kv._1)) + }) + map.toMap + } } def getLanguageTypeByCodeType(codeType: String, defaultLanguageType: String = ""): String = { if (StringUtils.isBlank(codeType)) { return "" } - getLanguageTypeAndCodeTypeRelationMap.getOrElse(codeType, defaultLanguageType) + val lowerCaseCodeType = codeType.toLowerCase(Locale.getDefault) + getLanguageTypeAndCodeTypeRelationMap.getOrElse(lowerCaseCodeType, defaultLanguageType) + } /** diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/LDAPUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/LDAPUtils.scala index b53184eceb4..f298d5af5bc 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/LDAPUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/LDAPUtils.scala @@ -19,12 +19,17 @@ package org.apache.linkis.common.utils import org.apache.linkis.common.conf.CommonVars +import org.apache.commons.codec.binary.Hex import org.apache.commons.lang3.StringUtils import javax.naming.Context import 
javax.naming.ldap.InitialLdapContext +import java.nio.charset.StandardCharsets import java.util.Hashtable +import java.util.concurrent.TimeUnit + +import com.google.common.cache.{Cache, CacheBuilder, RemovalListener, RemovalNotification} object LDAPUtils extends Logging { @@ -38,7 +43,33 @@ object LDAPUtils extends Logging { val baseDN = CommonVars("wds.linkis.ldap.proxy.baseDN", "").getValue val userNameFormat = CommonVars("wds.linkis.ldap.proxy.userNameFormat", "").getValue + private val storeUser: Cache[String, String] = CacheBuilder + .newBuilder() + .maximumSize(1000) + .expireAfterWrite(20, TimeUnit.MINUTES) + .removalListener(new RemovalListener[String, String] { + + override def onRemoval(removalNotification: RemovalNotification[String, String]): Unit = { + logger.info(s"store user remove key: ${removalNotification.getKey}") + } + + }) + .build() + def login(userID: String, password: String): Unit = { + + val saltPwd = storeUser.getIfPresent(userID) + if (StringUtils.isNotBlank(saltPwd)) { + Utils.tryAndWarn { + if ( + saltPwd.equalsIgnoreCase(Hex.encodeHexString(password.getBytes(StandardCharsets.UTF_8))) + ) { + logger.info(s"user $userID login success for storeUser") + return + } + } + } + val env = new Hashtable[String, String]() val bindDN = if (StringUtils.isBlank(userNameFormat)) userID @@ -53,6 +84,9 @@ object LDAPUtils extends Logging { env.put(Context.SECURITY_CREDENTIALS, bindPassword) new InitialLdapContext(env, null) + Utils.tryAndWarn { + storeUser.put(userID, Hex.encodeHexString(password.getBytes(StandardCharsets.UTF_8))) + } logger.info(s"user $userID login success.") } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/RSAUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/RSAUtils.scala index 746b3600a6c..4a34db89765 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/RSAUtils.scala +++ 
b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/RSAUtils.scala @@ -17,16 +17,22 @@ package org.apache.linkis.common.utils +import org.apache.linkis.common.conf.Configuration + import org.apache.commons.codec.binary.Hex import org.apache.commons.net.util.Base64 import javax.crypto.Cipher +import java.net.URLDecoder import java.nio.charset.StandardCharsets -import java.security.{KeyPair, KeyPairGenerator, PrivateKey, PublicKey} +import java.security.{KeyFactory, KeyPair, KeyPairGenerator, PrivateKey, PublicKey} +import java.security.spec.{PKCS8EncodedKeySpec, X509EncodedKeySpec} + +object RSAUtils extends Logging { + private implicit val keyPair = genKeyPair(2048) -object RSAUtils { - private implicit val keyPair = genKeyPair(1024) + implicit val PREFIX = "{RSA}" def genKeyPair(keyLength: Int): KeyPair = { val keyPair = KeyPairGenerator.getInstance("RSA") @@ -64,4 +70,112 @@ object RSAUtils { } def decrypt(data: Array[Byte]): Array[Byte] = decrypt(data, keyPair.getPrivate) + + /** + * 将字符串形式的公钥转换为 PublicKey 对象。 + * + * @param publicKeyStr + * 公钥字符串,Base64 编码 + * @return + * 转换后的 PublicKey 对象 + */ + def stringToPublicKey(publicKeyStr: String): PublicKey = { + val keyBytes = Base64.decodeBase64(publicKeyStr) + val keySpec = new X509EncodedKeySpec(keyBytes) + val keyFactory = KeyFactory.getInstance("RSA") + keyFactory.generatePublic(keySpec) + } + + /** + * 将字符串形式的私钥转换为 PrivateKey 对象。 + * + * @param privateKeyStr + * 私钥字符串,Base64 编码 + * @return + * 转换后的 PrivateKey 对象 + */ + def stringToPrivateKey(privateKeyStr: String): PrivateKey = { + val keyBytes = Base64.decodeBase64(privateKeyStr) + val keySpec = new PKCS8EncodedKeySpec(keyBytes) + val keyFactory = KeyFactory.getInstance("RSA") + keyFactory.generatePrivate(keySpec) + } + + /** + * 使用 Linkis 配置文件中的公钥对数据进行加密。 + * + * @param data + * 需要加密的原始数据字符串 + * @return + * 加密后的数据字符串,带有前缀 + */ + def encryptWithLinkisPublicKey(data: String): String = { + // 从配置文件中获取公钥和私钥字符串 + val publicKey = 
Configuration.LINKIS_RSA_PUBLIC_KEY.getValue + val privateKey = Configuration.LINKIS_RSA_PRIVATE_KEY.getValue + // 将公钥和私钥字符串转换为 KeyPair 对象 + val keyPair = + new KeyPair(RSAUtils.stringToPublicKey(publicKey), RSAUtils.stringToPrivateKey(privateKey)) + // 使用公钥对数据进行加密 + val encryptedData = RSAUtils.encrypt(data.getBytes, keyPair.getPublic) + // 将加密后的数据进行 Base64 编码,并添加前缀 + val encodedEncryptedData = + PREFIX + new String(Base64.encodeBase64URLSafe(encryptedData)) + encodedEncryptedData + } + + /** + * 使用 Linkis 配置文件中的私钥对数据进行解密。 + * + * @param data + * 需要解密的加密数据字符串,带有前缀 + * @return + * 解密后的原始数据字符串 + */ + def dncryptWithLinkisPublicKey(data: String): String = { + // 从配置文件中获取公钥和私钥字符串 + val publicKey = Configuration.LINKIS_RSA_PUBLIC_KEY.getValue + val privateKey = Configuration.LINKIS_RSA_PRIVATE_KEY.getValue + val decodedData = URLDecoder.decode(data, "UTF-8") + // 将公钥和私钥字符串转换为 KeyPair 对象 + val keyPair = + new KeyPair(RSAUtils.stringToPublicKey(publicKey), RSAUtils.stringToPrivateKey(privateKey)) + // 检查数据是否以指定前缀开头 + if (decodedData.startsWith(PREFIX)) { + // 去掉前缀,获取加密数据部分 + val dataSub = decodedData.substring(5) + // 将加密数据进行 Base64 解码 + val decodedEncryptedData = Base64.decodeBase64(dataSub) + // 使用私钥对数据进行解密 + val decryptedData = RSAUtils.decrypt(decodedEncryptedData, keyPair.getPrivate) + // 将解密后的数据转换为字符串 + val decryptedString = new String(decryptedData) + decryptedString + } else { + logger.warn(s"token信息非$PREFIX 开头,不执行解密!") + data + } + } + + /** + * 从给定的 token 中提取前半部分字符串。 + * + * @param token + * 输入的完整 token 字符串。 + * @return + * 提取的 token 后半部分字符串。 + */ + def tokenSubRule(token: String): String = { + val lowerToken = token.toLowerCase() + // 判断条件: + // 1. 以 "-auth" 结尾(不区分大小写)且长度 < 12 + // 2. 
或者长度 < 10 + if ((lowerToken.endsWith("-auth") && lowerToken.length < 12) || lowerToken.length < 10) { + token // 不截取,原样返回 + } else { + // 否则,取后半部分(原逻辑) + token.substring(token.length / 2, token.length) + } + } + } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala index 80e3ff7e5e0..67dfc0971a6 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/Utils.scala @@ -43,6 +43,8 @@ import org.slf4j.Logger object Utils extends Logging { + val DEFAULE_SCHEDULER_THREAD_NAME_PREFIX = "Linkis-Default-Scheduler-Thread-" + def tryQuietly[T](tryOp: => T): T = tryQuietly(tryOp, _ => ()) def tryCatch[T](tryOp: => T)(catchOp: Throwable => T): T = { @@ -181,6 +183,15 @@ object Utils extends Logging { ): ExecutionContextExecutorService = ExecutionContext.fromExecutorService(newCachedThreadPool(threadNum, threadName, isDaemon)) + def newCachedExecutionContextWithExecutor( + threadNum: Int, + threadName: String, + isDaemon: Boolean = true + ): (ExecutionContextExecutorService, ThreadPoolExecutor) = { + val threadPool: ThreadPoolExecutor = newCachedThreadPool(threadNum, threadName, isDaemon) + (ExecutionContext.fromExecutorService(threadPool), threadPool) + } + def newFixedThreadPool( threadNum: Int, threadName: String, @@ -199,7 +210,7 @@ object Utils extends Logging { val defaultScheduler: ScheduledThreadPoolExecutor = { val scheduler = - new ScheduledThreadPoolExecutor(20, threadFactory("Linkis-Default-Scheduler-Thread-", true)) + new ScheduledThreadPoolExecutor(20, threadFactory(DEFAULE_SCHEDULER_THREAD_NAME_PREFIX, true)) scheduler.setMaximumPoolSize(20) scheduler.setKeepAliveTime(5, TimeUnit.MINUTES) scheduler diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala 
b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala index 30bdeb4b147..bd2fab49302 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala @@ -43,6 +43,8 @@ object VariableUtils extends Logging { val RUN_TODAY_H = "run_today_h" + val RUN_TODAY_HOUR = "run_today_hour" + private val codeReg = "\\$\\{\\s*[A-Za-z][A-Za-z0-9_\\.]*\\s*[\\+\\-\\*/]?\\s*[A-Za-z0-9_\\.]*\\s*\\}".r @@ -83,6 +85,13 @@ object VariableUtils extends Logging { nameAndType(RUN_TODAY_H) = HourType(runTodayH) } } + if (variables.containsKey(RUN_TODAY_HOUR)) { + val runTodayHourStr = variables.get(RUN_TODAY_HOUR).asInstanceOf[String] + if (StringUtils.isNotBlank(runTodayHourStr)) { + val runTodayHour = new CustomHourType(runTodayHourStr, false) + nameAndType(RUN_TODAY_HOUR) = HourType(runTodayHour) + } + } initAllDateVars(run_date, nameAndType) val codeOperation = parserVar(replaceStr, nameAndType) parserDate(codeOperation, run_date) @@ -141,18 +150,27 @@ object VariableUtils extends Logging { nameAndType(RUN_TODAY_H) = HourType(runTodayH) } } + if (variables.containsKey(RUN_TODAY_HOUR)) { + val runTodayHourStr = variables.get(RUN_TODAY_HOUR).asInstanceOf[String] + if (StringUtils.isNotBlank(runTodayHourStr)) { + val runTodayHour = new CustomHourType(runTodayHourStr, false) + nameAndType(RUN_TODAY_HOUR) = HourType(runTodayHour) + } + } initAllDateVars(run_date, nameAndType) val codeOperation = parserVar(code, nameAndType) - parserDate(codeOperation, run_date) + parserDate(codeType, codeOperation, run_date) } + @deprecated private def parserDate(code: String, run_date: CustomDateType): String = { - if (Configuration.VARIABLE_OPERATION) { - val zonedDateTime: ZonedDateTime = VariableOperationUtils.toZonedDateTime(run_date.getDate) - VariableOperationUtils.replaces(zonedDateTime, code) - } else { - code - } + 
val zonedDateTime: ZonedDateTime = VariableOperationUtils.toZonedDateTime(run_date.getDate) + VariableOperationUtils.replaces(zonedDateTime, code) + } + + private def parserDate(codeType: String, code: String, run_date: CustomDateType): String = { + val zonedDateTime: ZonedDateTime = VariableOperationUtils.toZonedDateTime(run_date.getDate) + VariableOperationUtils.replaces(codeType, zonedDateTime, code) } private def initAllDateVars( @@ -255,6 +273,30 @@ object VariableUtils extends Logging { nameAndType("run_today_h_std") = HourType( new CustomHourType(nameAndType(RUN_TODAY_H).asInstanceOf[HourType].getValue, true) ) + // calculate run_today_hour base on run_date + if (nameAndType.contains("run_today_hour")) { + nameAndType("run_today_hour").asInstanceOf[HourType] + } else { + val run_today_hour = new CustomHourType(getCurHour(false, run_today.toString), false) + nameAndType("run_today_hour") = HourType(run_today_hour) + } + nameAndType("run_today_hour_std") = HourType( + new CustomHourType(nameAndType("run_today_hour").asInstanceOf[HourType].getValue, true) + ) + // calculate run_last_mon base on run_today + val run_roday_mon = new CustomMonType(getMonthDay(false, run_today.getDate), false) + nameAndType("run_last_mon_now") = MonType(new CustomMonType(run_roday_mon - 1, false, false)) + nameAndType("run_last_mon_now_std") = MonType(new CustomMonType(run_roday_mon - 1, true, false)) + // calculate run_current_mon_now base on run_today + nameAndType("run_current_mon_now") = MonType( + new CustomMonType(run_roday_mon.toString, false, false) + ) + nameAndType("run_current_mon_now_std") = MonType( + new CustomMonType(run_roday_mon.toString, true, false) + ) + // calculate run_mon_now base on run_today + nameAndType("run_mon_now") = MonType(new CustomMonType(run_roday_mon.toString, false, false)) + nameAndType("run_mon_now_std") = MonType(new CustomMonType(run_roday_mon.toString, true, false)) } /** @@ -337,7 +379,7 @@ object VariableUtils extends Logging { * * @param 
code * :code - * @param codeType + * @param languageType * :SQL,PYTHON * @return */ @@ -346,27 +388,37 @@ object VariableUtils extends Logging { var varString: String = null var errString: String = null + var rightVarString: String = null languageType match { case CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL => varString = """\s*--@set\s*.+\s*""" + rightVarString = """^\s*--@set\s*.+\s*""" errString = """\s*--@.*""" case CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON | CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL => varString = """\s*#@set\s*.+\s*""" + rightVarString = """^\s*#@set\s*.+\s*""" errString = """\s*#@""" case CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA => varString = """\s*//@set\s*.+\s*""" + rightVarString = """^\s*//@set\s*.+\s*""" errString = """\s*//@.+""" case CodeAndRunTypeUtils.LANGUAGE_TYPE_JAVA => varString = """\s*!!@set\s*.+\s*""" + rightVarString = """^\s*!!@set\s*.+\s*""" case _ => return nameAndValue } val customRegex = varString.r.unanchored + val customRightRegex = rightVarString.r.unanchored val errRegex = errString.r.unanchored code.split("\n").foreach { str => { + + if (customRightRegex.unapplySeq(str).size < customRegex.unapplySeq(str).size) { + logger.warn(s"code:$str is wrong custom variable format!!!") + } str match { case customRegex() => val clearStr = if (str.endsWith(";")) str.substring(0, str.length - 1) else str diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/CustomDateType.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/CustomDateType.scala index 4359df33984..0c528a4a9b6 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/CustomDateType.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/CustomDateType.scala @@ -79,20 +79,12 @@ class CustomMonthType(date: String, std: Boolean = true, isEnd: Boolean = false) def -(months: Int): String = { val dateFormat = DateTypeUtils.dateFormatLocal.get() 
- if (std) { - DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months)) - } else { - DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months)) - } + DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months)) } def +(months: Int): String = { val dateFormat = DateTypeUtils.dateFormatLocal.get() - if (std) { - DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months)) - } else { - DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months)) - } + DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months)) } override def toString: String = { @@ -111,20 +103,12 @@ class CustomMonType(date: String, std: Boolean = true, isEnd: Boolean = false) { def -(months: Int): String = { val dateFormat = DateTypeUtils.dateFormatMonLocal.get() - if (std) { - DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months)) - } else { - DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months)) - } + DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months)) } def +(months: Int): String = { val dateFormat = DateTypeUtils.dateFormatMonLocal.get() - if (std) { - DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months)) - } else { - DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months)) - } + DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months)) } override def toString: String = { diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/DateTypeUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/DateTypeUtils.scala index df6dff865d7..ed97be83daf 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/DateTypeUtils.scala +++ 
b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/DateTypeUtils.scala @@ -46,6 +46,10 @@ object DateTypeUtils { override protected def initialValue = new SimpleDateFormat("yyyy-MM-dd HH") } + val dateFormatSecondLocal = new ThreadLocal[SimpleDateFormat]() { + override protected def initialValue = new SimpleDateFormat("yyyyMMddHHmmss") + } + /** * Get Today"s date * diff --git a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/conf/BDPConfigurationTest.java b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/conf/BDPConfigurationTest.java new file mode 100644 index 00000000000..5a025eb8b02 --- /dev/null +++ b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/conf/BDPConfigurationTest.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.common.conf; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** BDPConfiguration Tester */ +public class BDPConfigurationTest { + + @Test + public void testGetOption() { + + Assertions.assertEquals( + "properties支持中文", + BDPConfiguration.getOption( + CommonVars.apply("linkis.jobhistory.error.msg.tip", "properties支持中文")) + .get()); + + Assertions.assertEquals( + "properties支持中文(默认)", + BDPConfiguration.getOption( + CommonVars.apply("linkis.jobhistory.error.msg.tip1", "properties支持中文(默认)")) + .get()); + } +} diff --git a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/utils/ByteTimeUtilsTest.java b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/utils/ByteTimeUtilsTest.java new file mode 100644 index 00000000000..f548d89d462 --- /dev/null +++ b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/utils/ByteTimeUtilsTest.java @@ -0,0 +1,175 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.common.utils; + +import java.util.function.Function; + +import com.google.common.collect.ImmutableMap; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class ByteTimeUtilsTest { + + private static final ImmutableMap> opFunction = + ImmutableMap.>builder() + .put("byteStringAsBytes", tar -> ByteTimeUtils.byteStringAsBytes(tar)) + .put("byteStringAsKb", tar -> ByteTimeUtils.byteStringAsKb(tar)) + .put("byteStringAsMb", tar -> ByteTimeUtils.byteStringAsMb(tar)) + .put("byteStringAsGb", tar -> ByteTimeUtils.byteStringAsGb(tar)) + .build(); + + private static final ImmutableMap convertToByte = + ImmutableMap.builder() + .put("1", 1l) + .put("1b", 1l) + .put("1B", 1l) + .put("1k", 1024l) + .put("1K", 1024l) + .put("1kb", 1024l) + .put("1Kb", 1024l) + .put("1kB", 1024l) + .put("1KB", 1024l) + .put("1m", 1024l * 1024l) + .put("1M", 1024l * 1024l) + .put("1mb", 1024l * 1024l) + .put("1Mb", 1024l * 1024l) + .put("1mB", 1024l * 1024l) + .put("1MB", 1024l * 1024l) + .put("1g", 1024l * 1024l * 1024l) + .put("1G", 1024l * 1024l * 1024l) + .put("1gb", 1024l * 1024l * 1024l) + .put("1gB", 1024l * 1024l * 1024l) + .put("1Gb", 1024l * 1024l * 1024l) + .put("1GB", 1024l * 1024l * 1024l) + .put("1t", 1024l * 1024l * 1024l * 1024l) + .put("1T", 1024l * 1024l * 1024l * 1024l) + .put("1tb", 1024l * 1024l * 1024l * 1024l) + .put("1Tb", 1024l * 1024l * 1024l * 1024l) + .put("1tB", 1024l * 1024l * 1024l * 1024l) + .put("1TB", 1024l * 1024l * 1024l * 1024l) + .put("1p", 1024l * 1024l * 1024l * 1024l * 1024l) + .put("1P", 1024l * 1024l * 1024l * 1024l * 1024l) + .put("1pb", 1024l * 1024l * 1024l * 1024l * 1024l) + .put("1Pb", 1024l * 1024l * 1024l * 1024l * 1024l) + .put("1pB", 1024l * 1024l * 1024l * 1024l * 1024l) + .put("1PB", 1024l * 1024l * 1024l * 1024l * 1024l) + .build(); + + private static final ImmutableMap convertToKB = + ImmutableMap.builder() + .put("1", 1l) + .put("1024b", 1l) + .put("1024B", 1l) + .put("1k", 1l) 
+ .put("1K", 1l) + .put("1kb", 1l) + .put("1Kb", 1l) + .put("1kB", 1l) + .put("1KB", 1l) + .put("1m", 1024l) + .put("1M", 1024l) + .put("1mb", 1024l) + .put("1Mb", 1024l) + .put("1mB", 1024l) + .put("1MB", 1024l) + .put("1g", 1024l * 1024l) + .put("1G", 1024l * 1024l) + .put("1gb", 1024l * 1024l) + .put("1gB", 1024l * 1024l) + .put("1Gb", 1024l * 1024l) + .put("1GB", 1024l * 1024l) + .build(); + + private static final ImmutableMap convertToMB = + ImmutableMap.builder() + .put("1", 1l) + .put("1024k", 1l) + .put("1024K", 1l) + .put("1024kb", 1l) + .put("1024Kb", 1l) + .put("1024kB", 1l) + .put("1024KB", 1l) + .put("1m", 1l) + .put("1M", 1l) + .put("1mb", 1l) + .put("1Mb", 1l) + .put("1mB", 1l) + .put("1MB", 1l) + .put("1g", 1024l) + .put("1G", 1024l) + .put("1gb", 1024l) + .put("1gB", 1024l) + .put("1Gb", 1024l) + .put("1GB", 1024l) + .build(); + + private static final ImmutableMap convertToGB = + ImmutableMap.builder() + .put("1", 1l) + .put("1024m", 1l) + .put("1024M", 1l) + .put("1024mb", 1l) + .put("1024Mb", 1l) + .put("1024mB", 1l) + .put("1024MB", 1l) + .put("1g", 1l) + .put("1G", 1l) + .put("1gb", 1l) + .put("1gB", 1l) + .put("1Gb", 1l) + .put("1GB", 1l) + .put("1t", 1024l) + .put("1T", 1024l) + .put("1tb", 1024l) + .put("1Tb", 1024l) + .put("1tB", 1024l) + .put("1TB", 1024l) + .build(); + + @Test + void byteStringAsBytes() { + convertToByte.forEach( + (k, v) -> Assertions.assertEquals(opFunction.get("byteStringAsBytes").apply(k), v)); + Assertions.assertThrows( + IllegalArgumentException.class, () -> opFunction.get("byteStringAsBytes").apply("1A")); + } + + @Test + void byteStringAsKb() { + convertToKB.forEach( + (k, v) -> Assertions.assertEquals(opFunction.get("byteStringAsKb").apply(k), v)); + Assertions.assertThrows( + IllegalArgumentException.class, () -> opFunction.get("byteStringAsKb").apply("1a")); + } + + @Test + void byteStringAsMb() { + convertToMB.forEach( + (k, v) -> Assertions.assertEquals(opFunction.get("byteStringAsMb").apply(k), v)); + 
Assertions.assertThrows( + IllegalArgumentException.class, () -> opFunction.get("byteStringAsMb").apply("1c")); + } + + @Test + void byteStringAsGb() { + convertToGB.forEach( + (k, v) -> Assertions.assertEquals(opFunction.get("byteStringAsGb").apply(k), v)); + Assertions.assertThrows( + IllegalArgumentException.class, () -> opFunction.get("byteStringAsGb").apply("1C")); + } +} diff --git a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/utils/SecurityUtilsTest.java b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/utils/SecurityUtilsTest.java index 4fdca7b82ac..6953b8835b9 100644 --- a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/utils/SecurityUtilsTest.java +++ b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/utils/SecurityUtilsTest.java @@ -17,166 +17,326 @@ package org.apache.linkis.common.utils; +import org.apache.linkis.common.conf.BDPConfiguration; import org.apache.linkis.common.exception.LinkisSecurityException; import java.util.HashMap; import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; /** SecurityUtils Tester */ public class SecurityUtilsTest { + @BeforeAll + public static void init() { + BDPConfiguration.set("linkis.mysql.strong.security.enable", "true"); + } + @Test - public void testAppendMysqlForceParamsUrl() throws Exception { - // allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false + public void testCheckUrl() { + // true String url = "jdbc:mysql://127.0.0.1:10000/db_name"; - String newUrl = SecurityUtils.appendMysqlForceParams(url); - Assertions.assertEquals( - url - + "?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false", - newUrl); - - url = "jdbc:mysql://127.0.0.1:10000/db_name?"; - newUrl = 
SecurityUtils.appendMysqlForceParams(url); - Assertions.assertEquals( - url - + "allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false", - newUrl); - - url = "jdbc:mysql://127.0.0.1:10000/db_name?p1=v1"; - newUrl = SecurityUtils.appendMysqlForceParams(url); - Assertions.assertEquals( - url - + "&" - + "allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false", - newUrl); + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkUrl(url); + }); + // false + String url1 = "jdbc:mysql://127.0.0.1:10000/db_name?"; + Assertions.assertThrows( + LinkisSecurityException.class, + () -> { + SecurityUtils.checkUrl(url1); + }); + // false + String url11 = "jdbc:mysql://127.0.0.1:10000/db_name?abc"; + Assertions.assertThrows( + LinkisSecurityException.class, + () -> { + SecurityUtils.checkUrl(url11); + }); + // true + String url2 = "jdbc:mysql://127.0.0.1:10000/"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkUrl(url2); + }); + // true + String url3 = "jdbc:mysql://127.0.0.1:10000"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkUrl(url3); + }); + // true + String url4 = "JDBC:mysql://127.0.0.1:10000/db_name"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkUrl(url4); + }); + // true + String url5 = "JDBC:H2://127.0.0.1:10000/db_name"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkUrl(url5); + }); + // true + String url6 = "JDBC:H2://test-example.com:10000/db_name"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkUrl(url6); + }); + // true + String url7 = "JDBC:H2://example.测试:10000/db_name"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkUrl(url7); + }); } @Test - public void testAppendMysqlForceParamsExtraParams() throws Exception { - Map extraParams = new HashMap<>(); - extraParams.put("testKey", "testValue"); - SecurityUtils.appendMysqlForceParams(extraParams); - 
Assertions.assertEquals("false", extraParams.get("allowLoadLocalInfile")); - Assertions.assertEquals("false", extraParams.get("autoDeserialize")); - Assertions.assertEquals("false", extraParams.get("allowLocalInfile")); - Assertions.assertEquals("false", extraParams.get("allowUrlInLocalInfile")); - Assertions.assertEquals("testValue", extraParams.get("testKey")); - Assertions.assertEquals(null, extraParams.get("otherKey")); + public void testGetUrl() { + BDPConfiguration.set("linkis.mysql.strong.security.enable", "true"); + String baseUrl = "jdbc:mysql://127.0.0.1:10000/db_name"; + String securityStr = + "allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false"; + String url1 = "jdbc:mysql://127.0.0.1:10000/db_name"; + Assertions.assertEquals(baseUrl, SecurityUtils.getJdbcUrl(url1)); + String url11 = "jdbc:mysql://127.0.0.1:10000/db_name?"; + Assertions.assertEquals(baseUrl, SecurityUtils.getJdbcUrl(url11)); + String url2 = "jdbc:mysql://127.0.0.1:10000/db_name?k1=v1&"; + Assertions.assertEquals(baseUrl + "?k1=v1&" + securityStr, SecurityUtils.getJdbcUrl(url2)); + String url3 = "jdbc:mysql://127.0.0.1:10000/db_name?k1=v1&k2"; + Assertions.assertEquals(baseUrl + "?k1=v1&" + securityStr, SecurityUtils.getJdbcUrl(url3)); } @Test - public void testCheckJdbcSecurityUrl() throws Exception { - String url = "jdbc:mysql://127.0.0.1:10000/db_name"; - String newUrl = SecurityUtils.checkJdbcSecurity(url); - Assertions.assertEquals(url, newUrl); + public void testRSA() { + String originalData = "rsa-test-str"; + String encryptData = RSAUtils.encryptWithLinkisPublicKey(originalData); + String dncryptData = RSAUtils.dncryptWithLinkisPublicKey(encryptData); + Assertions.assertEquals(dncryptData, originalData); + } - url = "jdbc:mysql://127.0.0.1:10000/db_name?"; - newUrl = SecurityUtils.checkJdbcSecurity(url); - Assertions.assertEquals(url, newUrl); + @Test + public void testCheckJdbcConnParams() { + String host = "127.0.0.1"; + Integer 
port = 3306; + String username = "test"; + String password = "test"; + String database = "tdb"; + Map extraParams = new HashMap<>(); + extraParams.put("k1", "v1"); - url = "jdbc:mysql://127.0.0.1:10000/db_name?p1=v1"; - newUrl = SecurityUtils.checkJdbcSecurity(url); - Assertions.assertEquals(url, newUrl); + // match ip + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkJdbcConnParams(host, port, username, password, database, extraParams); + }); + String host1 = "localhost"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkJdbcConnParams(host1, port, username, password, database, extraParams); + }); - // key is not security - url = "jdbc:mysql://127.0.0.1:10000/db_name?p1=v1&allowLocalInfile=true"; - AtomicReference atomUrl = new AtomicReference<>(url); + // match domain + String host2 = "www.apache.com"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkJdbcConnParams(host2, port, username, password, database, extraParams); + }); + + // error host + String host3 = "localhost:3306"; Assertions.assertThrows( LinkisSecurityException.class, () -> { - SecurityUtils.checkJdbcSecurity(atomUrl.get()); + SecurityUtils.checkJdbcConnParams(host3, port, username, password, database, extraParams); }); - // url encode - url = "jdbc:mysql://127.0.0.1:10000/db_name?allowLocalInfil%65=true"; - atomUrl.set(url); + String host4 = "localhost:3306/test"; Assertions.assertThrows( LinkisSecurityException.class, () -> { - SecurityUtils.checkJdbcSecurity(atomUrl.get()); + SecurityUtils.checkJdbcConnParams(host4, port, username, password, database, extraParams); }); - // value is not security - url = "jdbc:mysql://127.0.0.1:10000/db_name?p1=allowLocalInfile"; - atomUrl.set(url); + // error port Assertions.assertThrows( LinkisSecurityException.class, () -> { - SecurityUtils.checkJdbcSecurity(atomUrl.get()); + SecurityUtils.checkJdbcConnParams(host, null, username, password, database, extraParams); }); - // contains # - url = 
"jdbc:mysql://127.0.0.1:10000/db_name?p1=v1&#p2=v2"; - atomUrl.set(url); + // error username Assertions.assertThrows( LinkisSecurityException.class, () -> { - SecurityUtils.checkJdbcSecurity(atomUrl.get()); + SecurityUtils.checkJdbcConnParams(host, port, " ", password, database, extraParams); + }); + Assertions.assertThrows( + LinkisSecurityException.class, + () -> { + SecurityUtils.checkJdbcConnParams(host, port, null, password, database, extraParams); }); - } - @Test - public void testCheckJdbcSecurityParamsMap() throws Exception { - Map paramsMap = new HashMap<>(); - paramsMap.put("p1", "v1"); - Map newMap = SecurityUtils.checkJdbcSecurity(paramsMap); - Assertions.assertEquals("v1", newMap.get("p1")); + // check database, The database name can be empty + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkJdbcConnParams(host, port, username, password, " ", extraParams); + }); - // key not security - paramsMap.put("allowLocalInfil%67", "true"); - SecurityUtils.checkJdbcSecurity(paramsMap); - Assertions.assertEquals("true", newMap.get("allowLocalInfilg")); + String database1 = "test?k1=v1"; + Assertions.assertThrows( + LinkisSecurityException.class, + () -> { + SecurityUtils.checkJdbcConnParams(host, port, username, password, database1, extraParams); + }); - // key not security - paramsMap.put("allowLocalInfile", "false"); + // error param + extraParams.put("allowLoadLocalInfile", "true"); Assertions.assertThrows( LinkisSecurityException.class, () -> { - SecurityUtils.checkJdbcSecurity(paramsMap); + SecurityUtils.checkJdbcConnParams(host, port, username, password, database, extraParams); }); - // value not security - paramsMap.clear(); - paramsMap.put("p1", "allowLocalInfile"); + extraParams.clear(); + extraParams.put("autoDeserialize", "true"); Assertions.assertThrows( LinkisSecurityException.class, () -> { - SecurityUtils.checkJdbcSecurity(paramsMap); + SecurityUtils.checkJdbcConnParams(host, port, username, password, database, extraParams); }); - // 
value not security - paramsMap.clear(); - paramsMap.put("p1", "allowLocalInfil%65"); + extraParams.clear(); + extraParams.put("allowLocalInfile", "true"); Assertions.assertThrows( LinkisSecurityException.class, () -> { - SecurityUtils.checkJdbcSecurity(paramsMap); + SecurityUtils.checkJdbcConnParams(host, port, username, password, database, extraParams); }); - // contains # - paramsMap.clear(); - paramsMap.put("p1#", "v1"); + extraParams.clear(); + extraParams.put("allowUrlInLocalInfile", "false"); Assertions.assertThrows( LinkisSecurityException.class, () -> { - SecurityUtils.checkJdbcSecurity(paramsMap); + SecurityUtils.checkJdbcConnParams(host, port, username, password, database, extraParams); }); - paramsMap.clear(); - paramsMap.put("p1", "v1#"); + extraParams.clear(); + extraParams.put("allowLocalInfil%65", "true"); + Assertions.assertThrows( + LinkisSecurityException.class, + () -> { + SecurityUtils.checkJdbcConnParams(host, port, username, password, database, extraParams); + }); + + extraParams.clear(); + extraParams.put("#", "true"); Assertions.assertThrows( LinkisSecurityException.class, () -> { - SecurityUtils.checkJdbcSecurity(paramsMap); + SecurityUtils.checkJdbcConnParams(host, port, username, password, database, extraParams); + }); + + extraParams.clear(); + extraParams.put("test", "#"); + Assertions.assertThrows( + LinkisSecurityException.class, + () -> { + SecurityUtils.checkJdbcConnParams(host, port, username, password, database, extraParams); + }); + } + + @Test + public void testCheckJdbcConnUrl() { + // true + String url = "jdbc:mysql://127.0.0.1:10000/db_name"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkJdbcConnUrl(url); + }); + // true + String url1 = "jdbc:mysql://127.0.0.1:10000/db_name?"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkJdbcConnUrl(url1); + }); + // true + String url11 = "jdbc:mysql://127.0.0.1/db_name?"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkJdbcConnUrl(url11); 
}); + // true + String url2 = "JDBC:mysql://127.0.0.1:10000/db_name?"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkJdbcConnUrl(url2); + }); + // true + String url21 = "JDBC:h2://127.0.0.1:10000/db_name?"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkJdbcConnUrl(url21); + }); + // true + String url3 = "jdbc:mysql://127.0.0.1:10000/db_name?p1=v1"; + Assertions.assertDoesNotThrow( + () -> { + SecurityUtils.checkJdbcConnUrl(url3); + }); + // false url error + String url33 = + "jdbc:mysql://127.0.0.1:10000:/db_name?jdbc:mysql://127.0.0.1:10000?allowLocalInfile=true"; + Assertions.assertThrows( + LinkisSecurityException.class, + () -> { + SecurityUtils.checkJdbcConnUrl(url33); + }); + // false key is not security + String url4 = "jdbc:mysql://127.0.0.1:10000/db_name?p1=v1&allowLocalInfile=true"; + Assertions.assertThrows( + LinkisSecurityException.class, + () -> { + SecurityUtils.checkJdbcConnUrl(url4); + }); + + // false value is not security + String url5 = "jdbc:mysql://127.0.0.1:10000/db_name?p1=allowLocalInfile"; + Assertions.assertThrows( + LinkisSecurityException.class, + () -> { + SecurityUtils.checkJdbcConnUrl(url5); + }); + + // false contains # + String url6 = "jdbc:mysql://127.0.0.1:10000/db_name?p1=v1&#p2=v2"; + Assertions.assertThrows( + LinkisSecurityException.class, + () -> { + SecurityUtils.checkJdbcConnUrl(url6); + }); + } + + @Test + public void testAppendMysqlForceParamsExtraParams() { + Map extraParams = new HashMap<>(); + extraParams.put("testKey", "testValue"); + SecurityUtils.appendMysqlForceParams(extraParams); + Assertions.assertEquals("false", extraParams.get("allowLoadLocalInfile")); + Assertions.assertEquals("false", extraParams.get("autoDeserialize")); + Assertions.assertEquals("false", extraParams.get("allowLocalInfile")); + Assertions.assertEquals("false", extraParams.get("allowUrlInLocalInfile")); + Assertions.assertEquals("testValue", extraParams.get("testKey")); + Assertions.assertEquals(null, 
extraParams.get("otherKey")); } @Test - public void testMapToString() throws Exception { + public void testMapToString() { Map paramsMap = new HashMap<>(); paramsMap.put("p1", "v1"); String str = SecurityUtils.parseParamsMapToMysqlParamUrl(paramsMap); diff --git a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/variable/VariableOperationTest.java b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/variable/VariableOperationTest.java index d56c3cd2a1e..f033fe8ca99 100644 --- a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/variable/VariableOperationTest.java +++ b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/variable/VariableOperationTest.java @@ -38,21 +38,10 @@ public class VariableOperationTest { @Test public void testSqlFormat() throws VariableOperationFailedException { - String jsonOld = - "select \n" - + "\"&{yyyy-MM}\",\n" - + "\"&{yyyy-MM-dd HHmmss}\",\n" - + "\"&yyyyMMddHH\",\n" - + "\"&{yyyy-MM-dd-HH}\""; + String jsonOld = "select \n" + "\"&{yyyy-MM}\""; String jsonNew = VariableOperationUtils.replaces(zonedDateTime, jsonOld); System.out.println(jsonNew); - assertEquals( - jsonNew, - "select \n" - + "\"2022-04\",\n" - + "\"2022-04-02 173507\",\n" - + "\"&yyyyMMddHH\",\n" - + "\"2022-04-02-17\""); + assertEquals(jsonNew, "select \n" + "\"2022-04\""); } @Test diff --git a/linkis-commons/linkis-common/src/test/resources/linkis.properties b/linkis-commons/linkis-common/src/test/resources/linkis.properties new file mode 100644 index 00000000000..d6e47523f29 --- /dev/null +++ b/linkis-commons/linkis-common/src/test/resources/linkis.properties @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +linkis.jobhistory.error.msg.tip=properties支持中文 +linkis.test.error.conf=123 +linkis.test.error.conf2= 456 \ No newline at end of file diff --git a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/conf/ConfigurationTest.scala b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/conf/ConfigurationTest.scala index ee1102c91cf..33c8229a4b4 100644 --- a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/conf/ConfigurationTest.scala +++ b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/conf/ConfigurationTest.scala @@ -27,4 +27,11 @@ class ConfigurationTest { Assertions.assertFalse(Configuration.isAdmin("HaDooop")) } + @Test private[conf] def testFormatValue(): Unit = { + val confvalue = CommonVars[Int]("linkis.test.error.conf", 456).getValue + val confvalue2 = CommonVars[Int]("linkis.test.error.conf2", 789).getValue + Assertions.assertTrue(123 == confvalue) + Assertions.assertTrue(456 == confvalue2) + } + } diff --git a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala index c0d4ad1d618..892731e0d5f 100644 --- a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala +++ 
b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala @@ -22,6 +22,8 @@ import org.apache.linkis.common.variable.DateTypeUtils.{getCurHour, getToday} import java.util +import scala.collection.mutable + import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test @@ -40,6 +42,10 @@ class VariableUtilsTest { |'${run_half_year_begin-1}' as run_half_year_begin_sub1, |'${run_half_year_begin_std}' as run_half_year_begin_std, |'${run_half_year_end}' as run_half_year_end, + |'${run_last_mon_now}' as run_last_mon_now, + |'${run_last_mon_now_std}' as run_last_mon_now_std, + |'${submit_user}' as submit_user, + |'${execute_user}' as execute_user, |'${run_today_h+12}' as run_today_h_add1""".stripMargin val run_date = new CustomDateType(run_date_str, false) val dateType = DateType(run_date) @@ -57,10 +63,31 @@ class VariableUtilsTest { |'20190701' as run_half_year_begin_sub1, |'2020-01-01' as run_half_year_begin_std, |'20200630' as run_half_year_end, + |'202001' as run_last_mon_now, + |'2020-01' as run_last_mon_now_std, + |'hadoop' as submit_user, + |'hadoop' as execute_user, |'${hourTypeRes}' as run_today_h_add1""".stripMargin val varMap = new util.HashMap[String, String]() varMap.put("run_date", run_date_str) + varMap.put("execute_user", "hadoop") + varMap.put("submit_user", "hadoop") assertEquals(VariableUtils.replace(sql, "sql", varMap), resSql) } + @Test + def testGetCustomVar: Unit = { + var scalaCode = "" + + "-------@set globalpara=60--------\n" + + "--@set globalpara2=66\n" + + "select ${globalpara} as globalpara,\n" + + "-- ${globalpara1} as globalpara1, \n" + + "${globalpara2} as globalpara2;\n" + var pythonCode = "" + + val nameAndValue: mutable.Map[String, String] = + VariableUtils.getCustomVar(scalaCode, CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL); + assertEquals(nameAndValue.size, 2) + } + } diff --git 
a/linkis-commons/linkis-hadoop-common/src/main/java/org/apache/linkis/hadoop/common/utils/KerberosUtils.java b/linkis-commons/linkis-hadoop-common/src/main/java/org/apache/linkis/hadoop/common/utils/KerberosUtils.java index 6c5c125f6d8..608d5fad3c4 100644 --- a/linkis-commons/linkis-hadoop-common/src/main/java/org/apache/linkis/hadoop/common/utils/KerberosUtils.java +++ b/linkis-commons/linkis-hadoop-common/src/main/java/org/apache/linkis/hadoop/common/utils/KerberosUtils.java @@ -17,12 +17,14 @@ package org.apache.linkis.hadoop.common.utils; +import org.apache.linkis.common.utils.Utils; import org.apache.linkis.hadoop.common.conf.HadoopConf; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; import java.io.IOException; +import java.util.concurrent.TimeUnit; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,6 +35,10 @@ public class KerberosUtils { private static final Logger LOG = LoggerFactory.getLogger(KerberosUtils.class); + private static boolean kerberosRefreshStarted = false; + + private static final Object kerberosRefreshLock = new Object(); + private KerberosUtils() {} private static Configuration createKerberosSecurityConfiguration() { @@ -81,20 +87,20 @@ public static boolean runRefreshKerberosLogin() { public static Long getKerberosRefreshInterval() { long refreshInterval; - String refreshIntervalString = "86400000"; - // defined in linkis-env.sh, if not initialized then the default value is 86400000 ms (1d). - if (System.getenv("LINKIS_JDBC_KERBEROS_REFRESH_INTERVAL") != null) { - refreshIntervalString = System.getenv("LINKIS_JDBC_KERBEROS_REFRESH_INTERVAL"); + String refreshIntervalString = "43200"; + // defined in linkis-env.sh, if not initialized then the default value is 43200 s (0.5d). 
+ if (System.getenv("LINKIS_KERBEROS_REFRESH_INTERVAL") != null) { + refreshIntervalString = System.getenv("LINKIS_KERBEROS_REFRESH_INTERVAL"); } try { refreshInterval = Long.parseLong(refreshIntervalString); } catch (NumberFormatException e) { LOG.error( - "Cannot get time in MS for the given string, " + "Cannot get time in S for the given string, " + refreshIntervalString - + " defaulting to 86400000 ", + + " defaulting to 43200 ", e); - refreshInterval = 86400000L; + refreshInterval = 43200; } return refreshInterval; } @@ -102,14 +108,13 @@ public static Long getKerberosRefreshInterval() { public static Integer kinitFailTimesThreshold() { Integer kinitFailThreshold = 5; // defined in linkis-env.sh, if not initialized then the default value is 5. - if (System.getenv("LINKIS_JDBC_KERBEROS_KINIT_FAIL_THRESHOLD") != null) { + if (System.getenv("LINKIS_KERBEROS_KINIT_FAIL_THRESHOLD") != null) { try { - kinitFailThreshold = - new Integer(System.getenv("LINKIS_JDBC_KERBEROS_KINIT_FAIL_THRESHOLD")); + kinitFailThreshold = new Integer(System.getenv("LINKIS_KERBEROS_KINIT_FAIL_THRESHOLD")); } catch (Exception e) { LOG.error( "Cannot get integer value from the given string, " - + System.getenv("LINKIS_JDBC_KERBEROS_KINIT_FAIL_THRESHOLD") + + System.getenv("LINKIS_KERBEROS_KINIT_FAIL_THRESHOLD") + " defaulting to " + kinitFailThreshold, e); @@ -117,4 +122,71 @@ public static Integer kinitFailTimesThreshold() { } return kinitFailThreshold; } + + public static void checkStatus() { + try { + LOG.info("isSecurityEnabled:" + UserGroupInformation.isSecurityEnabled()); + LOG.info( + "userAuthenticationMethod:" + + UserGroupInformation.getLoginUser().getAuthenticationMethod()); + UserGroupInformation loginUsr = UserGroupInformation.getLoginUser(); + UserGroupInformation curUsr = UserGroupInformation.getCurrentUser(); + LOG.info("LoginUser: " + loginUsr); + LOG.info("CurrentUser: " + curUsr); + if (curUsr == null) { + LOG.info("CurrentUser is null"); + } else { + 
LOG.info("CurrentUser is not null"); + } + assert curUsr != null; + if (loginUsr.getClass() != curUsr.getClass()) { + LOG.info("getClass() is different"); + } else { + LOG.info("getClass() is same"); + } + if (loginUsr.equals(curUsr)) { + LOG.info("subject is equal"); + } else { + LOG.info("subject is not equal"); + } + } catch (Exception e) { + LOG.error("UGI error: ", e.getMessage()); + } + } + + public static void startKerberosRefreshThread() { + + if (kerberosRefreshStarted || !HadoopConf.KERBEROS_ENABLE()) { + LOG.warn( + "kerberos refresh thread had start or not kerberos {}", HadoopConf.HDFS_ENABLE_CACHE()); + return; + } + synchronized (kerberosRefreshLock) { + if (kerberosRefreshStarted) { + LOG.warn("kerberos refresh thread had start"); + return; + } + kerberosRefreshStarted = true; + LOG.info("kerberos Refresh tread started"); + Utils.defaultScheduler() + .scheduleAtFixedRate( + () -> { + try { + checkStatus(); + if (UserGroupInformation.isLoginKeytabBased()) { + LOG.info("Trying re-login from keytab"); + UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab(); + } else if (UserGroupInformation.isLoginTicketBased()) { + LOG.info("Trying re-login from ticket cache"); + UserGroupInformation.getLoginUser().reloginFromTicketCache(); + } + } catch (Exception e) { + LOG.error("Unable to re-login", e); + } + }, + getKerberosRefreshInterval(), + getKerberosRefreshInterval(), + TimeUnit.SECONDS); + } + } } diff --git a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/conf/HadoopConf.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/conf/HadoopConf.scala index fc7f91504f9..1a75418dfc3 100644 --- a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/conf/HadoopConf.scala +++ b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/conf/HadoopConf.scala @@ -23,26 +23,52 @@ object HadoopConf { val HADOOP_ROOT_USER = 
CommonVars("wds.linkis.hadoop.root.user", "hadoop") - val KERBEROS_ENABLE = CommonVars("wds.linkis.keytab.enable", false) + val KERBEROS_ENABLE = CommonVars("wds.linkis.keytab.enable", false).getValue + + val KERBEROS_ENABLE_MAP = + CommonVars("linkis.keytab.enable.map", "cluster1=false,cluster2=true") val KEYTAB_FILE = CommonVars("wds.linkis.keytab.file", "/appcom/keytab/") + val LINKIS_KEYTAB_FILE = CommonVars("linkis.copy.keytab.file", "/mnt/bdap/keytab/") + + val EXTERNAL_KEYTAB_FILE_PREFIX = + CommonVars("linkis.external.keytab.file.prefix", "/appcom/config/external-conf/keytab") + val KEYTAB_HOST = CommonVars("wds.linkis.keytab.host", "127.0.0.1") + val KEYTAB_HOST_MAP = + CommonVars("linkis.keytab.host.map", "cluster1=127.0.0.2,cluster2=127.0.0.3") + val KEYTAB_HOST_ENABLED = CommonVars("wds.linkis.keytab.host.enabled", false) val KEYTAB_PROXYUSER_ENABLED = CommonVars("wds.linkis.keytab.proxyuser.enable", false) val KEYTAB_PROXYUSER_SUPERUSER = CommonVars("wds.linkis.keytab.proxyuser.superuser", "hadoop") + val KEYTAB_PROXYUSER_SUPERUSER_MAP = + CommonVars("linkis.keytab.proxyuser.superuser.map", "cluster1=hadoop1,cluster2=hadoop2") + val hadoopConfDir = CommonVars("hadoop.config.dir", CommonVars("HADOOP_CONF_DIR", "").getValue).getValue val HADOOP_EXTERNAL_CONF_DIR_PREFIX = CommonVars("wds.linkis.hadoop.external.conf.dir.prefix", "/appcom/config/external-conf/hadoop") + /** + * Whether to close the hdfs underlying cache or turn it off if it is ture + */ + val FS_CACHE_DISABLE = + CommonVars[java.lang.Boolean]("wds.linkis.fs.hdfs.impl.disable.cache", false) + val HDFS_ENABLE_CACHE = CommonVars("wds.linkis.hadoop.hdfs.cache.enable", false).getValue + val HDFS_ENABLE_CACHE_CLOSE = + CommonVars("linkis.hadoop.hdfs.cache.close.enable", true).getValue + + val HDFS_ENABLE_NOT_CLOSE_USERS = + CommonVars("linkis.hadoop.hdfs.cache.not.close.users", "hadoop").getValue + val HDFS_ENABLE_CACHE_IDLE_TIME = CommonVars("wds.linkis.hadoop.hdfs.cache.idle.time", 3 * 60 * 
1000).getValue diff --git a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala index dfbc5c9347e..f87f89393e9 100644 --- a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala +++ b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala @@ -21,7 +21,7 @@ import org.apache.linkis.hadoop.common.conf.HadoopConf import org.apache.hadoop.fs.FileSystem -class HDFSFileSystemContainer(fs: FileSystem, user: String) { +class HDFSFileSystemContainer(fs: FileSystem, user: String, label: String) { private var lastAccessTime: Long = System.currentTimeMillis() @@ -31,6 +31,8 @@ class HDFSFileSystemContainer(fs: FileSystem, user: String) { def getUser: String = this.user + def getLabel: String = this.label + def getLastAccessTime: Long = this.lastAccessTime def updateLastAccessTime: Unit = { @@ -46,8 +48,7 @@ class HDFSFileSystemContainer(fs: FileSystem, user: String) { def canRemove(): Boolean = { val currentTime = System.currentTimeMillis() val idleTime = currentTime - this.lastAccessTime - idleTime > HadoopConf.HDFS_ENABLE_CACHE_MAX_TIME || (System - .currentTimeMillis() - this.lastAccessTime > HadoopConf.HDFS_ENABLE_CACHE_IDLE_TIME) && count <= 0 + idleTime > HadoopConf.HDFS_ENABLE_CACHE_MAX_TIME || ((idleTime > HadoopConf.HDFS_ENABLE_CACHE_IDLE_TIME) && count <= 0) } } diff --git a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala index 922b5f6a8f6..f6d91edbad2 100644 --- a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala +++ 
b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala @@ -17,7 +17,8 @@ package org.apache.linkis.hadoop.common.utils -import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.common.conf.Configuration.LINKIS_KEYTAB_SWITCH +import org.apache.linkis.common.utils.{AESUtils, Logging, Utils} import org.apache.linkis.hadoop.common.conf.HadoopConf import org.apache.linkis.hadoop.common.conf.HadoopConf._ import org.apache.linkis.hadoop.common.entity.HDFSFileSystemContainer @@ -29,21 +30,34 @@ import org.apache.hadoop.fs.{FileSystem, Path} import org.apache.hadoop.security.UserGroupInformation import java.io.File -import java.nio.file.Paths +import java.nio.file.{Files, Paths} +import java.nio.file.attribute.PosixFilePermissions import java.security.PrivilegedExceptionAction -import java.util.concurrent.TimeUnit +import java.util.Base64 +import java.util.concurrent.{ConcurrentHashMap, TimeUnit} +import java.util.concurrent.atomic.AtomicLong import scala.collection.JavaConverters._ object HDFSUtils extends Logging { private val fileSystemCache: java.util.Map[String, HDFSFileSystemContainer] = - new java.util.HashMap[String, HDFSFileSystemContainer]() + new ConcurrentHashMap[String, HDFSFileSystemContainer]() private val LOCKER_SUFFIX = "_HDFS" + private val DEFAULT_CACHE_LABEL = "default" + private val JOINT = "_" + val KEYTAB_SUFFIX = ".keytab" - if (HadoopConf.HDFS_ENABLE_CACHE) { - logger.info("HDFS Cache enabled ") + private val count = new AtomicLong + + /** + * For FS opened with public tenants, we should not perform close action, but should close only + * when hdfsfilesystem encounters closed problem + * 对于使用公共租户开启的FS,我们不应该去执行close动作,应该由hdfsfilesystem遇到closed问题时才进行关闭 + */ + if (HadoopConf.HDFS_ENABLE_CACHE && HadoopConf.HDFS_ENABLE_CACHE_CLOSE) { + logger.info("HDFS Cache clear enabled ") Utils.defaultScheduler.scheduleAtFixedRate( new Runnable { override def run(): Unit = Utils.tryAndWarn { 
@@ -58,8 +72,13 @@ object HDFSUtils extends Logging { .foreach { hdfsFileSystemContainer => val locker = hdfsFileSystemContainer.getUser + LOCKER_SUFFIX locker.intern() synchronized { - if (hdfsFileSystemContainer.canRemove()) { - fileSystemCache.remove(hdfsFileSystemContainer.getUser) + if ( + hdfsFileSystemContainer.canRemove() && !HadoopConf.HDFS_ENABLE_NOT_CLOSE_USERS + .contains(hdfsFileSystemContainer.getUser) + ) { + fileSystemCache.remove( + hdfsFileSystemContainer.getUser + JOINT + hdfsFileSystemContainer.getLabel + ) IOUtils.closeQuietly(hdfsFileSystemContainer.getFileSystem) logger.info( s"user${hdfsFileSystemContainer.getUser} to remove hdfsFileSystemContainer,because hdfsFileSystemContainer can remove" @@ -116,79 +135,153 @@ object HDFSUtils extends Logging { ) def getHDFSRootUserFileSystem(conf: org.apache.hadoop.conf.Configuration): FileSystem = - getHDFSUserFileSystem(HADOOP_ROOT_USER.getValue, conf) + getHDFSUserFileSystem(HADOOP_ROOT_USER.getValue, null, conf) - def getHDFSUserFileSystem(userName: String): FileSystem = - getHDFSUserFileSystem(userName, getConfiguration(userName)) + /** + * If the cache switch is turned on, fs will be obtained from the cache first + * @param userName + * @return + */ + def getHDFSUserFileSystem(userName: String): FileSystem = { + getHDFSUserFileSystem(userName, null) + } + + def getHDFSUserFileSystem(userName: String, label: String): FileSystem = { + + if (HadoopConf.HDFS_ENABLE_CACHE) { + val cacheLabel = if (label == null) DEFAULT_CACHE_LABEL else label + val cacheKey = userName + JOINT + cacheLabel + val locker = userName + LOCKER_SUFFIX + locker.intern().synchronized { + if (fileSystemCache.containsKey(cacheKey)) { + val hdfsFileSystemContainer = fileSystemCache.get(cacheKey) + hdfsFileSystemContainer.addAccessCount() + hdfsFileSystemContainer.updateLastAccessTime + hdfsFileSystemContainer.getFileSystem + } else { + getHDFSUserFileSystem(userName, label, getConfigurationByLabel(userName, label)) + } + } + } 
else { + getHDFSUserFileSystem(userName, label, getConfigurationByLabel(userName, label)) + } + } def getHDFSUserFileSystem( userName: String, + label: String, conf: org.apache.hadoop.conf.Configuration - ): FileSystem = if (HadoopConf.HDFS_ENABLE_CACHE) { - val locker = userName + LOCKER_SUFFIX - locker.intern().synchronized { - val hdfsFileSystemContainer = if (fileSystemCache.containsKey(userName)) { - fileSystemCache.get(userName) - } else { - val newHDFSFileSystemContainer = - new HDFSFileSystemContainer(createFileSystem(userName, conf), userName) - fileSystemCache.put(userName, newHDFSFileSystemContainer) - newHDFSFileSystemContainer + ): FileSystem = { + + if (HadoopConf.FS_CACHE_DISABLE.getValue && null != conf) { + conf.set("fs.hdfs.impl.disable.cache", "true") + } + if (HadoopConf.HDFS_ENABLE_CACHE) { + val locker = userName + LOCKER_SUFFIX + val cacheLabel = if (label == null) DEFAULT_CACHE_LABEL else label + val cacheKey = userName + JOINT + cacheLabel + locker.intern().synchronized { + val hdfsFileSystemContainer = if (fileSystemCache.containsKey(cacheKey)) { + fileSystemCache.get(cacheKey) + } else { + // we use cacheLabel to create HDFSFileSystemContainer, and in the rest part of HDFSUtils, we consistently + // use the same cacheLabel to operate HDFSFileSystemContainer, like close or remove. + // At the same time, we don't want to change the behavior of createFileSystem which is out of HDFSUtils, + // so we continue to use the original label to createFileSystem. 
+ val newHDFSFileSystemContainer = + new HDFSFileSystemContainer( + createFileSystem(userName, label, conf), + userName, + cacheLabel + ) + fileSystemCache.put(cacheKey, newHDFSFileSystemContainer) + newHDFSFileSystemContainer + } + hdfsFileSystemContainer.addAccessCount() + hdfsFileSystemContainer.updateLastAccessTime + hdfsFileSystemContainer.getFileSystem } - hdfsFileSystemContainer.addAccessCount() - hdfsFileSystemContainer.updateLastAccessTime - hdfsFileSystemContainer.getFileSystem + } else { + createFileSystem(userName, label, conf) } - } else { - createFileSystem(userName, conf) } def createFileSystem(userName: String, conf: org.apache.hadoop.conf.Configuration): FileSystem = - getUserGroupInformation(userName) + createFileSystem(userName, null, conf) + + def createFileSystem( + userName: String, + label: String, + conf: org.apache.hadoop.conf.Configuration + ): FileSystem = { + val createCount = count.getAndIncrement() + logger.info(s"user ${userName} to create Fs, create time ${createCount}") + getUserGroupInformation(userName, label) .doAs(new PrivilegedExceptionAction[FileSystem] { - // scalastyle:off FileSystemGet - def run: FileSystem = FileSystem.get(conf) - // scalastyle:on FileSystemGet + def run: FileSystem = FileSystem.newInstance(conf) }) + } def closeHDFSFIleSystem(fileSystem: FileSystem, userName: String): Unit = if (null != fileSystem && StringUtils.isNotBlank(userName)) { - closeHDFSFIleSystem(fileSystem, userName, false) + closeHDFSFIleSystem(fileSystem, userName, null, false) } + def closeHDFSFIleSystem(fileSystem: FileSystem, userName: String, label: String): Unit = + closeHDFSFIleSystem(fileSystem, userName, label, false) + def closeHDFSFIleSystem(fileSystem: FileSystem, userName: String, isForce: Boolean): Unit = + closeHDFSFIleSystem(fileSystem, userName, null, isForce) + + def closeHDFSFIleSystem( + fileSystem: FileSystem, + userName: String, + label: String, + isForce: Boolean + ): Unit = if (null != fileSystem && 
StringUtils.isNotBlank(userName)) { - if (HadoopConf.HDFS_ENABLE_CACHE) { - val hdfsFileSystemContainer = fileSystemCache.get(userName) - if (null != hdfsFileSystemContainer) { - val locker = userName + LOCKER_SUFFIX + val locker = userName + LOCKER_SUFFIX + if (HadoopConf.HDFS_ENABLE_CACHE) locker.intern().synchronized { + val cacheLabel = if (label == null) DEFAULT_CACHE_LABEL else label + val cacheKey = userName + JOINT + cacheLabel + val hdfsFileSystemContainer = fileSystemCache.get(cacheKey) + if ( + null != hdfsFileSystemContainer && fileSystem == hdfsFileSystemContainer.getFileSystem + ) { if (isForce) { - locker synchronized fileSystemCache.remove(hdfsFileSystemContainer.getUser) + fileSystemCache.remove(hdfsFileSystemContainer.getUser) IOUtils.closeQuietly(hdfsFileSystemContainer.getFileSystem) logger.info( s"user${hdfsFileSystemContainer.getUser} to Force remove hdfsFileSystemContainer" ) } else { - locker synchronized hdfsFileSystemContainer.minusAccessCount() + hdfsFileSystemContainer.minusAccessCount() } + } else { + IOUtils.closeQuietly(fileSystem) } - } else { + } + else { IOUtils.closeQuietly(fileSystem) } } def getUserGroupInformation(userName: String): UserGroupInformation = { - if (KERBEROS_ENABLE.getValue) { - if (!KEYTAB_PROXYUSER_ENABLED.getValue) { - val path = new File(KEYTAB_FILE.getValue, userName + ".keytab").getPath - val user = getKerberosUser(userName) - UserGroupInformation.setConfiguration(getConfiguration(userName)) + getUserGroupInformation(userName, null); + } + + def getUserGroupInformation(userName: String, label: String): UserGroupInformation = { + if (isKerberosEnabled(label)) { + if (!isKeytabProxyUserEnabled(label)) { + val path = getLinkisUserKeytabFile(userName, label) + val user = getKerberosUser(userName, label) + UserGroupInformation.setConfiguration(getConfigurationByLabel(userName, label)) UserGroupInformation.loginUserFromKeytabAndReturnUGI(user, path) } else { - val superUser = KEYTAB_PROXYUSER_SUPERUSER.getValue - 
val path = new File(KEYTAB_FILE.getValue, superUser + ".keytab").getPath - val user = getKerberosUser(superUser) - UserGroupInformation.setConfiguration(getConfiguration(superUser)) + val superUser = getKeytabSuperUser(label) + val path = getLinkisUserKeytabFile(superUser, label) + val user = getKerberosUser(superUser, label) + UserGroupInformation.setConfiguration(getConfigurationByLabel(superUser, label)) UserGroupInformation.createProxyUser( userName, UserGroupInformation.loginUserFromKeytabAndReturnUGI(user, path) @@ -199,12 +292,108 @@ object HDFSUtils extends Logging { } } - def getKerberosUser(userName: String): String = { + def isKerberosEnabled(label: String): Boolean = { + if (label == null) { + KERBEROS_ENABLE + } else { + kerberosValueMapParser(KERBEROS_ENABLE_MAP.getValue).get(label).contains("true") + } + } + + def isKeytabProxyUserEnabled(label: String): Boolean = { + if (label == null) { + KEYTAB_PROXYUSER_ENABLED.getValue + } else { + kerberosValueMapParser(KEYTAB_PROXYUSER_SUPERUSER_MAP.getValue).contains(label) + } + } + + def getKerberosUser(userName: String, label: String): String = { var user = userName - if (KEYTAB_HOST_ENABLED.getValue) { - user = user + "/" + KEYTAB_HOST.getValue + if (label == null) { + if (KEYTAB_HOST_ENABLED.getValue) { + user = user + "/" + KEYTAB_HOST.getValue + } + } else { + val hostMap = kerberosValueMapParser(KEYTAB_HOST_MAP.getValue) + if (hostMap.contains(label)) { + user = user + "/" + hostMap(label) + } } user } + def getKeytabSuperUser(label: String): String = { + if (label == null) { + KEYTAB_PROXYUSER_SUPERUSER.getValue + } else { + kerberosValueMapParser(KEYTAB_PROXYUSER_SUPERUSER_MAP.getValue)(label) + } + } + + def getKeytabPath(label: String): String = { + if (label == null) { + KEYTAB_FILE.getValue + } else { + val prefix = if (EXTERNAL_KEYTAB_FILE_PREFIX.getValue.endsWith("/")) { + EXTERNAL_KEYTAB_FILE_PREFIX.getValue + } else { + EXTERNAL_KEYTAB_FILE_PREFIX.getValue + "/" + } + prefix + label + } + } 
+ + def getLinkisKeytabPath(label: String): String = { + if (label == null) { + LINKIS_KEYTAB_FILE.getValue + } else { + val prefix = if (EXTERNAL_KEYTAB_FILE_PREFIX.getValue.endsWith("/")) { + EXTERNAL_KEYTAB_FILE_PREFIX.getValue + } else { + EXTERNAL_KEYTAB_FILE_PREFIX.getValue + "/" + } + prefix + label + } + } + + private def kerberosValueMapParser(configV: String): Map[String, String] = { + val confDelimiter = "," + if (configV == null || "".equals(configV)) { + Map() + } else { + configV + .split(confDelimiter) + .filter(x => x != null && !"".equals(x)) + .map(x => { + val confArr = x.split("=") + if (confArr.length == 2) { + (confArr(0).trim, confArr(1).trim) + } else null + }) + .filter(kerberosValue => + kerberosValue != null && StringUtils.isNotBlank( + kerberosValue._1 + ) && null != kerberosValue._2 + ) + .toMap + } + } + + private def getLinkisUserKeytabFile(userName: String, label: String): String = { + val path = if (LINKIS_KEYTAB_SWITCH) { + // 读取文件 + val byte = Files.readAllBytes(Paths.get(getLinkisKeytabPath(label), userName + KEYTAB_SUFFIX)) + // 加密内容// 加密内容 + val encryptedContent = AESUtils.decrypt(byte, AESUtils.PASSWORD) + val tempFile = Files.createTempFile(userName, KEYTAB_SUFFIX) + Files.setPosixFilePermissions(tempFile, PosixFilePermissions.fromString("rw-------")) + Files.write(tempFile, encryptedContent) + tempFile.toString + } else { + new File(getKeytabPath(label), userName + KEYTAB_SUFFIX).getPath + } + path + } + } diff --git a/linkis-commons/linkis-hadoop-common/src/test/java/org/apache/linkis/hadoop/common/utils/KerberosUtilsTest.java b/linkis-commons/linkis-hadoop-common/src/test/java/org/apache/linkis/hadoop/common/utils/KerberosUtilsTest.java deleted file mode 100644 index b84988a74a2..00000000000 --- a/linkis-commons/linkis-hadoop-common/src/test/java/org/apache/linkis/hadoop/common/utils/KerberosUtilsTest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * 
contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.hadoop.common.utils; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class KerberosUtilsTest { - - @Test - @DisplayName("getKerberosRefreshIntervalTest") - public void getKerberosRefreshIntervalTest() { - - Long refreshInterval = KerberosUtils.getKerberosRefreshInterval(); - Assertions.assertTrue(86400000L == refreshInterval.longValue()); - } - - @Test - @DisplayName("kinitFailTimesThresholdTest") - public void kinitFailTimesThresholdTest() { - - Integer timesThreshold = KerberosUtils.kinitFailTimesThreshold(); - Assertions.assertTrue(5 == timesThreshold.intValue()); - } -} diff --git a/linkis-commons/linkis-hadoop-common/src/test/scala/org/apache/linkis/hadoop/common/conf/HadoopConfTest.scala b/linkis-commons/linkis-hadoop-common/src/test/scala/org/apache/linkis/hadoop/common/conf/HadoopConfTest.scala index 44ca1dabcb9..7c2c7b38355 100644 --- a/linkis-commons/linkis-hadoop-common/src/test/scala/org/apache/linkis/hadoop/common/conf/HadoopConfTest.scala +++ b/linkis-commons/linkis-hadoop-common/src/test/scala/org/apache/linkis/hadoop/common/conf/HadoopConfTest.scala @@ -26,7 +26,7 @@ class HadoopConfTest { def constTest(): Unit = { 
Assertions.assertEquals("hadoop", HadoopConf.HADOOP_ROOT_USER.getValue) - Assertions.assertFalse(HadoopConf.KERBEROS_ENABLE.getValue) + Assertions.assertFalse(HadoopConf.KERBEROS_ENABLE) Assertions.assertEquals("/appcom/keytab/", HadoopConf.KEYTAB_FILE.getValue) Assertions.assertEquals("127.0.0.1", HadoopConf.KEYTAB_HOST.getValue) Assertions.assertFalse(HadoopConf.KEYTAB_HOST_ENABLED.getValue) diff --git a/linkis-commons/linkis-httpclient/pom.xml b/linkis-commons/linkis-httpclient/pom.xml index 473b591a085..1951e3cd4f2 100644 --- a/linkis-commons/linkis-httpclient/pom.xml +++ b/linkis-commons/linkis-httpclient/pom.xml @@ -43,30 +43,6 @@ ${httpmime.version} - - org.json4s - json4s-jackson_${scala.binary.version} - ${json4s.version} - - - org.scala-lang - scala-library - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-core - - - - diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala index 7b443eb9203..1d927e1321e 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala @@ -59,12 +59,22 @@ import org.apache.http.conn.{ ConnectTimeoutException, HttpHostConnectException } +import org.apache.http.conn.ssl.{SSLConnectionSocketFactory, TrustSelfSignedStrategy} import org.apache.http.entity.{ContentType, StringEntity} import org.apache.http.entity.mime.MultipartEntityBuilder -import org.apache.http.impl.client.{BasicCookieStore, CloseableHttpClient, HttpClients} +import org.apache.http.impl.client.{ + BasicCookieStore, + CloseableHttpClient, + HttpClientBuilder, + HttpClients +} +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager 
import org.apache.http.message.BasicNameValuePair +import org.apache.http.ssl.SSLContextBuilder import org.apache.http.util.EntityUtils +import javax.net.ssl.{HostnameVerifier, SSLContext, SSLSession} + import java.net.URI import java.util import java.util.Locale @@ -80,12 +90,29 @@ abstract class AbstractHttpClient(clientConfig: ClientConfig, clientName: String protected val cookieStore = new BasicCookieStore - protected val httpClient: CloseableHttpClient = HttpClients + protected val connectionManager = new PoolingHttpClientConnectionManager + + private val httpClientBuilder: HttpClientBuilder = HttpClients .custom() .setDefaultCookieStore(cookieStore) .setMaxConnTotal(clientConfig.getMaxConnection) .setMaxConnPerRoute(clientConfig.getMaxConnection / 2) - .build + .setConnectionManager(connectionManager) + + protected val httpClient: CloseableHttpClient = if (clientConfig.isSSL) { + val sslContext: SSLContext = + SSLContextBuilder.create.loadTrustMaterial(null, new TrustSelfSignedStrategy).build + + val sslConnectionFactory = new SSLConnectionSocketFactory( + sslContext, + new HostnameVerifier() { + override def verify(hostname: String, session: SSLSession) = true + } + ) + httpClientBuilder.setSSLSocketFactory(sslConnectionFactory).build() + } else { + httpClientBuilder.build() + } if (clientConfig.getAuthenticationStrategy != null) { clientConfig.getAuthenticationStrategy match { @@ -137,28 +164,35 @@ abstract class AbstractHttpClient(clientConfig: ClientConfig, clientName: String val prepareReqTime = System.currentTimeMillis - startTime prepareCookie(action) val attempts = new util.ArrayList[Long]() - def addAttempt(): CloseableHttpResponse = { val req = prepareReq(action) val startTime = System.currentTimeMillis val response = executeRequest(req, Some(waitTime).filter(_ > 0)) + val taken = System.currentTimeMillis - startTime + attempts.add(taken) + val costTime = ByteTimeUtils.msDurationToString(taken) + logger.info( + s"invoke ${req.getURI} get status 
${response.getStatusLine.getStatusCode} taken: ${costTime}." + ) if (response.getStatusLine.getStatusCode == 401) { - tryLogin(action, getRequestUrl(action), true) - logger.info("The user is not logged in, please log in first, you can set a retry") val msg = Utils.tryCatch(EntityUtils.toString(response.getEntity)) { t => logger.warn("failed to parse entity", t) "" } IOUtils.closeQuietly(response) - throw new HttpClientRetryException( - "The user is not logged in, please log in first, you can set a retry, message: " + msg - ) + tryLogin(action, getRequestUrl(action), true) + if (attempts.size() <= 1) { + logger.info("The user is not logged in, default retry once") + addAttempt() + } else { + logger.info("The user is not logged in, you can set a retry") + throw new HttpClientRetryException( + "The user is not logged in, please log in first, you can set a retry, message: " + msg + ) + } + } else { + response } - val taken = System.currentTimeMillis - startTime - attempts.add(taken) - val costTime = ByteTimeUtils.msDurationToString(taken) - logger.info(s"invoke ${req.getURI} taken: ${costTime}.") - response } val response = @@ -570,4 +604,16 @@ abstract class AbstractHttpClient(clientConfig: ClientConfig, clientName: String httpClient.close() } + def getHttpConnectionStats: util.HashMap[String, Int] = { + val totalStats = connectionManager.getTotalStats + val clientConnectInfo = new util.HashMap[String, Int]() + clientConnectInfo.put("leased", totalStats.getLeased) + clientConnectInfo.put("avaiLabel", totalStats.getAvailable) + clientConnectInfo.put("maxTotal", connectionManager.getMaxTotal) + logger.info(s"BMLClient:总最大连接数:${connectionManager.getMaxTotal}") + logger.info(s"BMLClient:空闲连接数:${totalStats.getAvailable}") + logger.info(s"BMLClient:活跃连接数:${totalStats.getLeased}") + clientConnectInfo + } + } diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala 
b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala index c45de8f4664..30f04999c52 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala @@ -54,6 +54,9 @@ abstract class AbstractAuthenticationStrategy extends AuthenticationStrategy wit getKeyByUserAndURL(user, serverUrl) } + protected def getAuthenticationActionByKey(key: String): Authentication = + userNameToAuthentications.get(key) + def setClientConfig(clientConfig: ClientConfig): Unit = this.clientConfig = clientConfig def getClientConfig: ClientConfig = clientConfig @@ -61,16 +64,14 @@ abstract class AbstractAuthenticationStrategy extends AuthenticationStrategy wit def login(requestAction: Action, serverUrl: String): Authentication = { val key = getKey(requestAction, serverUrl) if (key == null) return null - if ( - userNameToAuthentications - .containsKey(key) && !isTimeout(userNameToAuthentications.get(key)) - ) { - val authenticationAction = userNameToAuthentications.get(key) + val oldAuth = getAuthenticationActionByKey(key) + if (null != oldAuth && !isTimeout(oldAuth)) { + val authenticationAction = oldAuth authenticationAction.updateLastAccessTime() authenticationAction } else { key.intern() synchronized { - var authentication = userNameToAuthentications.get(key) + var authentication = getAuthenticationActionByKey(key) if (authentication == null || isTimeout(authentication)) { authentication = tryLogin(requestAction, serverUrl) putSession(key, authentication) diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/Authentication.scala b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/Authentication.scala index 
e40a10cd834..18e7dddd0c8 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/Authentication.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/authentication/Authentication.scala @@ -25,4 +25,6 @@ trait Authentication { def updateLastAccessTime(): Unit + def getCreateTime: Long = System.currentTimeMillis() + } diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfig.scala b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfig.scala index dbce2d32a21..dea081bd3bc 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfig.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfig.scala @@ -44,6 +44,7 @@ class ClientConfig private () { private var maxConnection: Int = 20 private var retryEnabled: Boolean = _ private var retryHandler: RetryHandler = _ + private var ssl: Boolean = false protected[config] def this( serverUrl: String, @@ -59,7 +60,8 @@ class ClientConfig private () { retryEnabled: Boolean, retryHandler: RetryHandler, authTokenKey: String, - authTokenValue: String + authTokenValue: String, + isSSL: Boolean = false ) = { this() this.serverUrl = serverUrl @@ -78,6 +80,7 @@ class ClientConfig private () { this.retryHandler = retryHandler this.authTokenKey = authTokenKey this.authTokenValue = authTokenValue + this.ssl = isSSL authenticationStrategy match { case ab: AbstractAuthenticationStrategy => ab.setClientConfig(this) case _ => @@ -123,4 +126,6 @@ class ClientConfig private () { def getRetryHandler: RetryHandler = retryHandler + def isSSL: Boolean = ssl + } diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala 
b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala index c3e5afba302..a574b89fb5b 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala @@ -17,6 +17,7 @@ package org.apache.linkis.httpclient.config +import org.apache.linkis.common.exception.LinkisRetryException import org.apache.linkis.common.utils.{DefaultRetryHandler, RetryHandler} import org.apache.linkis.httpclient.authentication.AuthenticationStrategy import org.apache.linkis.httpclient.loadbalancer.LoadBalancerStrategy @@ -38,7 +39,14 @@ class ClientConfigBuilder protected () { protected var readTimeout: Long = _ protected var maxConnection: Int = _ protected var retryEnabled: Boolean = true - protected var retryHandler: RetryHandler = new DefaultRetryHandler + + protected var ssl: Boolean = false + + protected var retryHandler: RetryHandler = { + val retryHandler = new DefaultRetryHandler + retryHandler.addRetryException(classOf[LinkisRetryException]) + retryHandler + } def addServerUrl(serverUrl: String): this.type = { this.serverUrl = serverUrl @@ -106,6 +114,11 @@ class ClientConfigBuilder protected () { this } + def setSSL(isSSL: Boolean): this.type = { + this.ssl = isSSL + this + } + def build(): ClientConfig = new ClientConfig( serverUrl, discoveryEnabled, @@ -120,7 +133,8 @@ class ClientConfigBuilder protected () { retryEnabled, retryHandler, authTokenKey, - authTokenValue + authTokenValue, + ssl ) } diff --git a/linkis-commons/linkis-module/pom.xml b/linkis-commons/linkis-module/pom.xml index b70331e0d11..6fbf0ca58dc 100644 --- a/linkis-commons/linkis-module/pom.xml +++ b/linkis-commons/linkis-module/pom.xml @@ -41,6 +41,24 @@ ${knife4j.version} + + io.springfox + springfox-spring-webflux + ${springfox.version} + + + io.github.classgraph + classgraph + + + + 
+ + io.github.classgraph + classgraph + ${classgraph.version} + + org.springframework spring-core @@ -49,7 +67,6 @@ org.springframework.boot spring-boot - ${spring.boot.version} org.apache.logging.log4j @@ -113,9 +130,29 @@ io.github.x-stream mxparser + + com.fasterxml.woodstox + woodstox-core + + + org.codehaus.jettison + jettison + + + com.fasterxml.woodstox + woodstox-core + ${woodstox.core.version} + + + + org.codehaus.jettison + jettison + ${jettision.version} + + org.springframework.boot spring-boot-starter-cache @@ -197,59 +234,6 @@ compile - - org.springframework.cloud - spring-cloud-starter-config - - - org.springframework.boot - spring-boot-starter-logging - - - org.springframework - spring-web - - - org.springframework.boot - spring-boot-starter - - - org.springframework.boot - spring-boot-autoconfigure - - - org.springframework.cloud - spring-cloud-config-client - - - org.springframework.cloud - spring-cloud-starter - - - - - - org.springframework.cloud - spring-cloud-config-client - - - org.springframework.cloud - spring-cloud-commons - - - org.springframework.cloud - spring-cloud-context - - - org.springframework.boot - spring-boot-autoconfigure - - - org.springframework - spring-web - - - org.springframework.cloud spring-cloud-starter @@ -322,30 +306,7 @@ validation-api 2.0.1.Final - - commons-dbcp - commons-dbcp - 1.4 - - - com.sun.jersey - jersey-server - - - javax.ws.rs - jsr311-api - - - javax.validation - validation-api - - - - - com.sun.jersey - jersey-servlet - org.glassfish.jersey.containers jersey-container-servlet @@ -361,21 +322,6 @@ - - com.sun.jersey - jersey-json - - - javax.ws.rs - jsr311-api - - - com.sun.jersey - jersey-core - - - - com.fasterxml.jackson.core jackson-databind @@ -399,27 +345,22 @@ io.netty netty-all - - org.json4s - json4s-jackson_${scala.binary.version} - ${json4s.version} - - - org.scala-lang - scala-library - - - com.fasterxml.jackson.core - jackson-databind - - - - redis.clients jedis ${jedis.version} + + 
org.springframework.cloud + spring-cloud-openfeign-core + + + + org.springframework.retry + spring-retry + 1.3.4 + + @@ -431,4 +372,127 @@ + + + eureka + + true + + discovery + eureka + + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + javax.ws.rs + jsr311-api + + + org.springframework.boot + spring-boot-autoconfigure + + + org.springframework.boot + spring-boot-starter-aop + + + org.springframework.cloud + spring-cloud-starter + + + org.springframework.cloud + spring-cloud-commons + + + org.springframework.cloud + spring-cloud-context + + + org.springframework.boot + spring-boot-starter + + + org.springframework.boot + spring-boot-starter-cache + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + io.github.x-stream + mxparser + + + + + + + nacos + + + discovery + nacos + + + + + com.alibaba.cloud + spring-cloud-starter-alibaba-nacos-discovery + ${spring-cloud-alibaba.version} + + + org.springframework.boot + * + + + org.springframework.cloud + spring-cloud-commons + + + org.springframework.cloud + spring-cloud-context + + + org.springframework.boot + spring-boot-starter + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + com.google.code.findbugs + jsr305 + + + org.yaml + snakeyaml + + + io.prometheus + simpleclient + + + com.google.guava + guava + + + + + + + diff --git a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/DataWorkCloudApplication.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/DataWorkCloudApplication.java index 6f0256fdfa7..10ab8f92680 100644 --- a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/DataWorkCloudApplication.java +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/DataWorkCloudApplication.java @@ -26,8 +26,6 @@ import org.apache.linkis.server.conf.ServerConfiguration; import org.apache.commons.lang3.StringUtils; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.springframework.boot.SpringApplication; import org.springframework.boot.WebApplicationType; @@ -41,6 +39,7 @@ import org.springframework.cloud.client.discovery.EnableDiscoveryClient; import org.springframework.cloud.context.config.annotation.RefreshScope; import org.springframework.cloud.context.scope.refresh.RefreshScopeRefreshedEvent; +import org.springframework.cloud.openfeign.EnableFeignClients; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; @@ -49,6 +48,7 @@ import org.springframework.core.env.Environment; import org.springframework.core.env.PropertySource; import org.springframework.core.env.StandardEnvironment; +import org.springframework.retry.annotation.EnableRetry; import org.springframework.web.filter.CharacterEncodingFilter; import javax.servlet.DispatcherType; @@ -62,12 +62,16 @@ import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.webapp.WebAppContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @SpringBootApplication(scanBasePackages = {"org.apache.linkis", "com.webank.wedatasphere"}) @EnableDiscoveryClient @RefreshScope +@EnableFeignClients +@EnableRetry public class DataWorkCloudApplication extends SpringBootServletInitializer { - private static final Log logger = LogFactory.getLog(DataWorkCloudApplication.class); + private static final Logger logger = LoggerFactory.getLogger(DataWorkCloudApplication.class); private static ConfigurableApplicationContext applicationContext; private static ServiceInstance serviceInstance; diff --git a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/InterceptorConfigure.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/InterceptorConfigure.java new file mode 100644 index 
00000000000..c9c52fc4303 --- /dev/null +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/InterceptorConfigure.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.server; + +import org.springframework.context.annotation.Configuration; +import org.springframework.web.servlet.config.annotation.InterceptorRegistry; +import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; + +@Configuration +public class InterceptorConfigure implements WebMvcConfigurer { + + @Override + public void addInterceptors(InterceptorRegistry registry) { + registry.addInterceptor(new PerformanceInterceptor()); + } +} diff --git a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/PerformanceInterceptor.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/PerformanceInterceptor.java new file mode 100644 index 00000000000..2a9cb2dd028 --- /dev/null +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/PerformanceInterceptor.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.server; + +import org.apache.linkis.utils.LinkisSpringUtils; + +import org.springframework.web.servlet.HandlerInterceptor; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class PerformanceInterceptor implements HandlerInterceptor { + + private static final Logger logger = LoggerFactory.getLogger(PerformanceInterceptor.class); + + @Override + public boolean preHandle( + HttpServletRequest request, HttpServletResponse response, Object handler) { + request.setAttribute("Linkis_startTime", System.currentTimeMillis()); + return true; + } + + @Override + public void afterCompletion( + HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) { + Object startObject = request.getAttribute("Linkis_startTime"); + if (null != startObject) { + long startTime = (Long) startObject; + long endTime = System.currentTimeMillis(); + long executeTime = endTime - startTime; + logger.info( + "Request client address:{} request URL: {} Method: {} taken: {} ms", + LinkisSpringUtils.getClientIP(request), + request.getRequestURI(), + request.getMethod(), + executeTime); + } + } +} diff --git 
a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/utils/ModuleUserUtils.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/utils/ModuleUserUtils.java index 3661a66cb84..0c4b7dea057 100644 --- a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/utils/ModuleUserUtils.java +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/server/utils/ModuleUserUtils.java @@ -113,4 +113,8 @@ public static String getTokenUser(HttpServletRequest httpServletRequest) { } return tokenUser; } + + public static void printAuditLog(String auditLogMsg) { + LOGGER.info(auditLogMsg); + } } diff --git a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/swagger/SwaggerBeanPostProcessor.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/swagger/SwaggerBeanPostProcessor.java new file mode 100644 index 00000000000..f07b7ed0a6e --- /dev/null +++ b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/swagger/SwaggerBeanPostProcessor.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.swagger; + +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.config.BeanPostProcessor; +import org.springframework.context.annotation.Configuration; +import org.springframework.util.ReflectionUtils; +import org.springframework.web.servlet.mvc.method.RequestMappingInfoHandlerMapping; + +import java.lang.reflect.Field; +import java.util.List; +import java.util.stream.Collectors; + +import springfox.documentation.spring.web.plugins.WebFluxRequestHandlerProvider; +import springfox.documentation.spring.web.plugins.WebMvcRequestHandlerProvider; + +@Configuration +public class SwaggerBeanPostProcessor implements BeanPostProcessor { + @Override + public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { + if (bean instanceof WebMvcRequestHandlerProvider + || bean instanceof WebFluxRequestHandlerProvider) { + List handlerMappings = getHandlerMappings(bean); + customizeSpringfoxHandlerMappings(handlerMappings); + } + return bean; + } + + private void customizeSpringfoxHandlerMappings( + List mappings) { + List copy = + mappings.stream() + .filter(mapping -> mapping.getPatternParser() == null) + .collect(Collectors.toList()); + mappings.clear(); + mappings.addAll(copy); + } + + @SuppressWarnings("unchecked") + private List getHandlerMappings(Object bean) { + try { + Field field = ReflectionUtils.findField(bean.getClass(), "handlerMappings"); + field.setAccessible(true); + return (List) field.get(bean); + } catch (IllegalArgumentException | IllegalAccessException e) { + throw new IllegalStateException(e); + } + } +} diff --git a/linkis-commons/linkis-module/src/main/java/org/apache/linkis/utils/LinkisSpringUtils.java b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/utils/LinkisSpringUtils.java new file mode 100644 index 00000000000..8021bb11910 --- /dev/null +++ 
b/linkis-commons/linkis-module/src/main/java/org/apache/linkis/utils/LinkisSpringUtils.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.utils; + +import javax.servlet.http.HttpServletRequest; + +public class LinkisSpringUtils { + + public static String getClientIP(HttpServletRequest request) { + String clientIp = request.getHeader("X-Forwarded-For"); + + if (clientIp == null || clientIp.isEmpty() || "unknown".equalsIgnoreCase(clientIp)) { + clientIp = request.getHeader("Proxy-Client-IP"); + } + if (clientIp == null || clientIp.isEmpty() || "unknown".equalsIgnoreCase(clientIp)) { + clientIp = request.getHeader("WL-Proxy-Client-IP"); + } + if (clientIp == null || clientIp.isEmpty() || "unknown".equalsIgnoreCase(clientIp)) { + clientIp = request.getHeader("HTTP_CLIENT_IP"); + } + if (clientIp == null || clientIp.isEmpty() || "unknown".equalsIgnoreCase(clientIp)) { + clientIp = request.getHeader("HTTP_X_FORWARDED_FOR"); + } + if (clientIp == null || clientIp.isEmpty() || "unknown".equalsIgnoreCase(clientIp)) { + clientIp = request.getRemoteAddr(); + } + if (clientIp != null && clientIp.contains(",")) { + clientIp = clientIp.split(",")[0]; + } + + return clientIp; + } +} 
diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/Knife4jConfig.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/Knife4jConfig.scala index c454aaacb22..23d07bdf905 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/Knife4jConfig.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/Knife4jConfig.scala @@ -47,9 +47,7 @@ import springfox.documentation.swagger2.annotations.EnableSwagger2WebMvc * 4, in your browser,add dataworkcloud_inner_request=true, bdp-user-ticket-id's value and workspaceId's value into cookie * */ -@EnableSwagger2WebMvc -@EnableKnife4j -@Configuration + class Knife4jConfig extends WebMvcConfigurer { @Value("${spring.application.name}") private var appName = "linkis service" diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala index 582568e6262..ed6c680648a 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala @@ -38,7 +38,7 @@ object ServerConfiguration extends Logging { val BDP_SERVER_SPRING_APPLICATION_LISTENERS = CommonVars("wds.linkis.server.spring.application.listeners", "") - val BDP_SERVER_VERSION: String = CommonVars("wds.linkis.server.version", "").getValue + val BDP_SERVER_VERSION: String = CommonVars("wds.linkis.server.version", "v1").getValue if (StringUtils.isBlank(BDP_SERVER_VERSION)) { throw new BDPInitServerException( @@ -207,4 +207,7 @@ object ServerConfiguration extends Logging { val LINKIS_SERVER_SESSION_PROXY_TICKETID_KEY = CommonVars("wds.linkis.session.proxy.user.ticket.key", "linkis_user_session_proxy_ticket_id_v1") + val LINKIS_SERVER_ENTRANCE_HEADER_KEY = + 
CommonVars("linkis.server.entrance.header.key", "jobInstanceKey") + } diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/security/SecurityFilter.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/security/SecurityFilter.scala index 411bda4820b..b372ead651e 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/security/SecurityFilter.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/security/SecurityFilter.scala @@ -83,7 +83,7 @@ class SecurityFilter extends Filter { ServerConfiguration.BDP_SERVER_RESTFUL_PASS_AUTH_REQUEST_URI .exists(r => !r.equals("") && request.getRequestURI.startsWith(r)) ) { - logger.info("pass auth uri: " + request.getRequestURI) + logger.debug("pass auth uri: " + request.getRequestURI) true } else { val userName = Utils.tryCatch(SecurityFilter.getLoginUser(request)) { @@ -199,7 +199,11 @@ object SecurityFilter { def getLoginUsername(req: HttpServletRequest): String = { if (Configuration.IS_TEST_MODE.getValue) { - ServerConfiguration.BDP_TEST_USER.getValue; + val testUser = ServerConfiguration.BDP_TEST_USER.getValue + if (StringUtils.isBlank(testUser)) { + throw new IllegalUserTicketException("Need to set test user when enable test module") + } + testUser } else { getLoginUser(req).getOrElse( throw new IllegalUserTicketException(ILLEGAL_USER_TOKEN.getErrorDesc) diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala index 3ec7495f1f1..73699f38ef1 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala @@ -70,7 +70,7 @@ object RedisClient { SessionHAConfiguration.RedisHost, SessionHAConfiguration.RedisPort, redisTimeout, - 
SessionHAConfiguration.RedisSentinalServer + SessionHAConfiguration.RedisPassword ) } @@ -98,8 +98,8 @@ object RedisClient { maxIdle: Int, minIdle: Int, maxWaitMillis: Long - ): GenericObjectPoolConfig[Nothing] = { - val poolConfig = new GenericObjectPoolConfig + ): GenericObjectPoolConfig[Jedis] = { + val poolConfig = new GenericObjectPoolConfig[Jedis]() poolConfig.setMaxTotal(maxTotal) poolConfig.setMaxIdle(maxIdle) poolConfig.setMinIdle(minIdle) diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/utils/LinkisMainHelper.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/utils/LinkisMainHelper.scala index 7614bbef2ae..5c04d985f44 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/utils/LinkisMainHelper.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/utils/LinkisMainHelper.scala @@ -61,7 +61,7 @@ object LinkisMainHelper { } resArr = resArr :+ s"--prometheus.endpoint=$prometheusEndpoint" } - return resArr + resArr } } diff --git a/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/DataSourceUtils.java b/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/DataSourceUtils.java index 8def4f6a102..6c0a4508bc1 100644 --- a/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/DataSourceUtils.java +++ b/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/DataSourceUtils.java @@ -24,6 +24,7 @@ import javax.sql.DataSource; import com.alibaba.druid.pool.DruidDataSource; +import com.alibaba.druid.pool.vendor.MySqlValidConnectionChecker; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -71,6 +72,11 @@ public static DataSource buildDataSource(String dbUrl, String username, String p MybatisConfiguration.MYBATIS_DATASOURCE_REMOVE_ABANDONED_ENABLED.getValue(); int removeAbandonedTimeout = MybatisConfiguration.MYBATIS_DATASOURCE_REMOVE_ABANDONED_TIMEOUT.getValue(); + + boolean 
jdbcKeepAlive = MybatisConfiguration.MYBATIS_DATASOURCE_KEEPALIVE_ENABLED.getValue(); + + boolean jdbcUsePingMethod = MybatisConfiguration.MYBATIS_DATASOURCE_USE_PING_ENABLED.getValue(); + DruidDataSource datasource = new DruidDataSource(); logger.info("Database connection address information(数据库连接地址信息)=" + dbUrl); datasource.setUrl(dbUrl); @@ -87,6 +93,16 @@ public static DataSource buildDataSource(String dbUrl, String username, String p datasource.setTestWhileIdle(testWhileIdle); datasource.setTestOnBorrow(testOnBorrow); datasource.setTestOnReturn(testOnReturn); + + datasource.setKeepAlive(jdbcKeepAlive); + + if (!jdbcUsePingMethod) { + // use test sql for keepalive + MySqlValidConnectionChecker checker = new MySqlValidConnectionChecker(); + checker.setUsePingMethod(false); + datasource.setValidConnectionChecker(checker); + } + datasource.setPoolPreparedStatements(poolPreparedStatements); datasource.setRemoveAbandoned(removeAbandoned); datasource.setRemoveAbandonedTimeout(removeAbandonedTimeout); diff --git a/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/conf/MybatisConfiguration.java b/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/conf/MybatisConfiguration.java index 74e06d7ec3d..a365b6a50f6 100644 --- a/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/conf/MybatisConfiguration.java +++ b/linkis-commons/linkis-mybatis/src/main/java/org/apache/linkis/mybatis/conf/MybatisConfiguration.java @@ -45,7 +45,7 @@ public class MybatisConfiguration { public static final CommonVars BDP_SERVER_MYBATIS_DATASOURCE_MAXACTIVE = CommonVars.apply("wds.linkis.server.mybatis.datasource.maxActive", 20); public static final CommonVars BDP_SERVER_MYBATIS_DATASOURCE_MAXWAIT = - CommonVars.apply("wds.linkis.server.mybatis.datasource.maxWait", 6000); + CommonVars.apply("wds.linkis.server.mybatis.datasource.maxWait", 60000); public static final CommonVars BDP_SERVER_MYBATIS_DATASOURCE_TBERM = 
CommonVars.apply("wds.linkis.server.mybatis.datasource.timeBetweenEvictionRunsMillis", 60000); public static final CommonVars BDP_SERVER_MYBATIS_DATASOURCE_MEITM = @@ -63,6 +63,11 @@ public class MybatisConfiguration { CommonVars.apply("wds.linkis.server.mybatis.datasource.poolPreparedStatements", Boolean.TRUE); public static final CommonVars MYBATIS_DATASOURCE_REMOVE_ABANDONED_ENABLED = CommonVars.apply("wds.linkis.server.mybatis.remove.abandoned.enabled", Boolean.TRUE); + public static final CommonVars MYBATIS_DATASOURCE_KEEPALIVE_ENABLED = + CommonVars.apply("linkis.server.mybatis.keepalive.enabled", Boolean.TRUE); + public static final CommonVars MYBATIS_DATASOURCE_USE_PING_ENABLED = + CommonVars.apply("linkis.server.mybatis.use.ping.enabled", Boolean.TRUE); + public static final CommonVars MYBATIS_DATASOURCE_REMOVE_ABANDONED_TIMEOUT = CommonVars.apply("wds.linkis.server.mybatis.remove.abandoned.timeout", 300); } diff --git a/linkis-commons/linkis-mybatis/src/test/java/org/apache/linkis/mybatis/conf/MybatisConfigurationTest.java b/linkis-commons/linkis-mybatis/src/test/java/org/apache/linkis/mybatis/conf/MybatisConfigurationTest.java index 4aeddcdcc84..de7be27180d 100644 --- a/linkis-commons/linkis-mybatis/src/test/java/org/apache/linkis/mybatis/conf/MybatisConfigurationTest.java +++ b/linkis-commons/linkis-mybatis/src/test/java/org/apache/linkis/mybatis/conf/MybatisConfigurationTest.java @@ -66,7 +66,7 @@ public void constTest() { Assertions.assertTrue(1 == bdpServerMybatisDatasourceInitialsize.intValue()); Assertions.assertTrue(1 == bdpServerMybatisDatasourceMinidle.intValue()); Assertions.assertTrue(20 == bdpServerMybatisDatasourceMaxactive.intValue()); - Assertions.assertTrue(6000 == bdpServerMybatisDatasourceMaxwait.intValue()); + Assertions.assertTrue(60000 == bdpServerMybatisDatasourceMaxwait.intValue()); Assertions.assertTrue(60000 == bdpServerMybatisDatasourceTberm.intValue()); Assertions.assertTrue(300000 == 
bdpServerMybatisDatasourceMeitm.intValue()); Assertions.assertEquals("SELECT 1", bdpServerMybatisDatasourceValidationquery); diff --git a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java index 42661f82839..fd188ed429a 100644 --- a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java +++ b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java @@ -63,12 +63,13 @@ public interface TaskConstant { String JOB_MEMORY_PERCENT = "memoryPercent"; String JOB_CORE_RGB = "coreRGB"; String JOB_MEMORY_RGB = "memoryRGB"; - + String JOB_IS_REUSE = "isReuse"; String JOB_ENGINECONN_MAP = "engineconnMap"; String ENGINE_INSTANCE = "engineInstance"; String TICKET_ID = "ticketId"; String ENGINE_CONN_TASK_ID = "engineConnTaskId"; String ENGINE_CONN_SUBMIT_TIME = "engineConnSubmitTime"; + String DEBUG_ENBALE = "debug.enable"; String PARAMS_DATA_SOURCE = "dataSources"; @@ -83,4 +84,6 @@ public interface TaskConstant { String RECEIVER = "receiver"; String SUB_SYSTEM_ID = "subSystemId"; String EXTRA = "extra"; + String ECM_INSTANCE = "ecmInstance"; + String ENGINE_LOG_PATH = "engineLogPath"; } diff --git a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/engine/EngineInfo.java b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/engine/EngineInfo.java deleted file mode 100644 index 0504ee2113e..00000000000 --- a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/engine/EngineInfo.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.protocol.engine; - -public class EngineInfo { - - private Long id; - private EngineState engineState; - - public EngineInfo() {} - - public EngineInfo(Long id, EngineState state) { - this.id = id; - this.engineState = state; - } - - public Long getId() { - return id; - } - - public void setId(Long id) { - this.id = id; - } - - public EngineState getEngineState() { - return engineState; - } - - public void setEngineState(EngineState engineState) { - this.engineState = engineState; - } -} diff --git a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/util/ImmutablePair.java b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/util/ImmutablePair.java index 28fb7a040eb..f09029b998c 100644 --- a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/util/ImmutablePair.java +++ b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/util/ImmutablePair.java @@ -18,6 +18,7 @@ package org.apache.linkis.protocol.util; import java.util.AbstractMap; +import java.util.Objects; public class ImmutablePair { @@ -53,6 +54,11 @@ public boolean equals(Object o) { } } + @Override + public int hashCode() { + return Objects.hashCode(entry); + } + private boolean eq(Object o1, Object o2) { if (null != o1 && null != o2) { return o1.equals(o2); diff --git 
a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala deleted file mode 100644 index 91b0e017270..00000000000 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.protocol.callback - -import org.apache.linkis.protocol.message.RequestProtocol - -// TODO: log type -case class LogCallbackProtocol(nodeId: String, logs: Array[String]) extends RequestProtocol - -case class YarnAPPIdCallbackProtocol(nodeId: String, applicationId: String) extends RequestProtocol - -case class YarnInfoCallbackProtocol(nodeId: String, uri: String) extends RequestProtocol diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineCallback.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineCallback.scala deleted file mode 100644 index 8856d3a9277..00000000000 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineCallback.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.protocol.engine - -object EngineCallback { - private val DWC_APPLICATION_NAME = "dwc.application.name" - private val DWC_INSTANCE = "dwc.application.instance" - - def mapToEngineCallback(options: Map[String, String]): EngineCallback = - EngineCallback(options(DWC_APPLICATION_NAME), options(DWC_INSTANCE)) - - def callbackToMap(engineCallback: EngineCallback): Map[String, String] = - Map( - DWC_APPLICATION_NAME -> engineCallback.applicationName, - DWC_INSTANCE -> engineCallback.instance - ) - -} - -case class EngineCallback(applicationName: String, instance: String) diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineStateTransitionRequest.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineStateTransitionRequest.scala deleted file mode 100644 index 9137001c145..00000000000 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/EngineStateTransitionRequest.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.protocol.engine - -case class EngineStateTransitionRequest(engineInstance: String, state: String) - -case class EngineStateTransitionResponse( - engineInstance: String, - state: String, - result: Boolean, - message: String -) diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/RequestEngineStatus.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/RequestEngineStatus.scala deleted file mode 100644 index a4672aa4e5d..00000000000 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/RequestEngineStatus.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.protocol.engine - -import org.apache.linkis.protocol.RetryableProtocol -import org.apache.linkis.protocol.message.RequestProtocol - -case class RequestEngineStatus(messageType: Int) extends RetryableProtocol with RequestProtocol - -object RequestEngineStatus { - val Status_Only = 1 - val Status_Overload = 2 - val Status_Concurrent = 3 - val Status_Overload_Concurrent = 4 - val Status_BasicInfo = 5 - val ALL = 6 -} diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/RequestUserEngineKill.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/RequestUserEngineKill.scala deleted file mode 100644 index beb7987b01a..00000000000 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/RequestUserEngineKill.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.protocol.engine - -import org.apache.linkis.protocol.message.RequestProtocol - -case class RequestUserEngineKill( - ticketId: String, - creator: String, - user: String, - properties: Map[String, String] -) extends RequestProtocol - -case class ResponseUserEngineKill(ticketId: String, status: String, message: String) - -object ResponseUserEngineKill { - val Success = "Success" - val Error = "Error" -} diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ProtocolUtils.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ProtocolUtils.scala deleted file mode 100644 index 1bb0791be39..00000000000 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ProtocolUtils.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.protocol.utils - -import org.apache.linkis.common.conf.CommonVars - -object ProtocolUtils { - - val SERVICE_SUFFIX = CommonVars("wds.linkis.service.suffix", "engineManager,entrance,engine") - val suffixs = SERVICE_SUFFIX.getValue.split(",") - - /** - * Pass in moduleName to return the corresponding appName 传入moduleName返回对应的appName - * @param moduleName - * module's name - * @return - * application's name - */ - def getAppName(moduleName: String): Option[String] = { - val moduleNameLower = moduleName.toLowerCase() - for (suffix <- suffixs) { - if (moduleNameLower.contains(suffix.toLowerCase())) { - return Some(moduleNameLower.replace(suffix.toLowerCase(), "")) - } - } - None - } - -} diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala index 3b94bbdc14b..3affc351d92 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala @@ -45,6 +45,14 @@ object TaskUtils { } } else params.put(key, waitToAdd) + private def clearMap(params: util.Map[String, AnyRef], key: String): Unit = + if (params != null && params.containsKey(key)) { + params.get(key) match { + case map: util.Map[String, AnyRef] => map.clear() + case _ => params.put(key, new util.HashMap[String, AnyRef]()) + } + } + private def getConfigurationMap( params: util.Map[String, AnyRef], key: String @@ -84,17 +92,35 @@ object TaskUtils { def addStartupMap(params: util.Map[String, AnyRef], startupMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, startupMap, TaskConstant.PARAMS_CONFIGURATION_STARTUP) + def clearStartupMap(params: util.Map[String, AnyRef]): Unit = { + val configurationMap = getMap(params, TaskConstant.PARAMS_CONFIGURATION) + if (!configurationMap.isEmpty) 
{ + clearMap(configurationMap, TaskConstant.PARAMS_CONFIGURATION_STARTUP) + } + } + def addRuntimeMap(params: util.Map[String, AnyRef], runtimeMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, runtimeMap, TaskConstant.PARAMS_CONFIGURATION_RUNTIME) def addSpecialMap(params: util.Map[String, AnyRef], specialMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, specialMap, TaskConstant.PARAMS_CONFIGURATION_SPECIAL) - // tdoo + // todo def getLabelsMap(params: util.Map[String, AnyRef]): util.Map[String, AnyRef] = getMap(params, TaskConstant.LABELS) def addLabelsMap(params: util.Map[String, AnyRef], labels: util.Map[String, AnyRef]): Unit = addMap(params, labels, TaskConstant.LABELS) + def isWithDebugInfo(params: util.Map[String, AnyRef]): Boolean = { + val debug = getConfigurationMap(params, TaskConstant.PARAMS_CONFIGURATION_STARTUP).get( + TaskConstant.DEBUG_ENBALE + ) + if (debug != null && "true".equals(debug.toString)) { + true + } else { + false + } + } + } diff --git a/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/RequestEngineStatusTest.scala b/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/RequestEngineStatusTest.scala deleted file mode 100644 index d9fc07b6c0f..00000000000 --- a/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/RequestEngineStatusTest.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.protocol.engine - -import org.junit.jupiter.api.{Assertions, DisplayName, Test} - -class RequestEngineStatusTest { - - @Test - @DisplayName("constTest") - def constTest(): Unit = { - - val statusOnly = RequestEngineStatus.Status_Only - val statusOverload = RequestEngineStatus.Status_Overload - val statusConcurrent = RequestEngineStatus.Status_Concurrent - val statusOverloadConcurrent = RequestEngineStatus.Status_Overload_Concurrent - val statusBasicInfo = RequestEngineStatus.Status_BasicInfo - val all = RequestEngineStatus.ALL - - Assertions.assertTrue(1 == statusOnly) - Assertions.assertTrue(2 == statusOverload) - Assertions.assertTrue(3 == statusConcurrent) - Assertions.assertTrue(4 == statusOverloadConcurrent) - Assertions.assertTrue(5 == statusBasicInfo) - Assertions.assertTrue(6 == all) - - } - -} diff --git a/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/ResponseUserEngineKillTest.scala b/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/ResponseUserEngineKillTest.scala deleted file mode 100644 index dbf3f5e3b57..00000000000 --- a/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/engine/ResponseUserEngineKillTest.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.protocol.engine - -import org.junit.jupiter.api.{Assertions, DisplayName, Test} - -class ResponseUserEngineKillTest { - - @Test - @DisplayName("constTest") - def constTest(): Unit = { - - val success = ResponseUserEngineKill.Success - val error = ResponseUserEngineKill.Error - - Assertions.assertEquals("Success", success) - Assertions.assertEquals("Error", error) - } - -} diff --git a/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/utils/ProtocolUtilsTest.scala b/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/utils/ProtocolUtilsTest.scala deleted file mode 100644 index 2435f514970..00000000000 --- a/linkis-commons/linkis-protocol/src/test/scala/org/apache/linkis/protocol/utils/ProtocolUtilsTest.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.protocol.utils - -import org.junit.jupiter.api.{Assertions, DisplayName, Test} - -class ProtocolUtilsTest { - - @Test - @DisplayName("constTest") - def constTest(): Unit = { - - val serviceSuffix = ProtocolUtils.SERVICE_SUFFIX.getValue - val suffixs = ProtocolUtils.suffixs - - Assertions.assertNotNull(serviceSuffix) - Assertions.assertTrue(suffixs.length == 3) - } - - @Test - @DisplayName("getAppNameTest") - def getAppNameTest(): Unit = { - - val modeleName = "engineManager" - val appNameOption = ProtocolUtils.getAppName(modeleName) - Assertions.assertNotNull(appNameOption.get) - - } - -} diff --git a/linkis-commons/linkis-rpc/pom.xml b/linkis-commons/linkis-rpc/pom.xml index 3a51a0bfcd9..e916107f501 100644 --- a/linkis-commons/linkis-rpc/pom.xml +++ b/linkis-commons/linkis-rpc/pom.xml @@ -56,15 +56,7 @@ com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind + * com.google.code.findbugs @@ -78,8 +70,17 @@ org.springframework.cloud spring-cloud-commons + + commons-fileupload + commons-fileupload + + + commons-fileupload + commons-fileupload + ${commons-fileupload.version} + org.springframework.cloud spring-cloud-commons diff --git a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java index 3723ca145df..5aabaccea2f 100644 --- 
a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java +++ b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/errorcode/LinkisRpcErrorCodeSummary.java @@ -28,8 +28,14 @@ public enum LinkisRpcErrorCodeSummary implements LinkisErrorCode { 10003, "The corresponding anti-sequence class was not found:{0}(找不到对应的反序列类:{0})"), CORRESPONDING_TO_INITIALIZE( 10004, "The corresponding anti-sequence class:{0} failed to initialize(对应的反序列类:{0} 初始化失败)"), + CORRESPONDING_CLASS_ILLEGAL( + 10005, "The corresponding anti-sequence class:{0} is illegal (对应的反序列类:{0} 不合法)"), APPLICATION_IS_NOT_EXISTS( 10051, "The instance:{0} of application {1} does not exist(应用程序:{0} 的实例:{1} 不存在)."), + + INSTANCE_ERROR(10052, "The instance:{0} is error should ip:port."), + + INSTANCE_NOT_FOUND_ERROR(10053, "The instance:{0} is not found."), RPC_INIT_ERROR(10054, "Asyn RPC Consumer Thread has stopped!(Asyn RPC Consumer 线程已停止!)"); /** 错误码 */ diff --git a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtils.java b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtils.java deleted file mode 100644 index f022fc8c7fb..00000000000 --- a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtils.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.rpc.message.utils; - -import org.springframework.cloud.openfeign.ribbon.LoadBalancerFeignClient; - -import java.lang.reflect.Field; - -import feign.Request.Options; - -public class LoadBalancerOptionsUtils { - - private static Options DEFAULT_OPTIONS = null; - - private static Object locker = new Object(); - - public static Options getDefaultOptions() throws NoSuchFieldException, IllegalAccessException { - if (null == DEFAULT_OPTIONS) { - synchronized (locker) { - Class clazz = LoadBalancerFeignClient.class; - Field optionField = clazz.getDeclaredField("DEFAULT_OPTIONS"); - optionField.setAccessible(true); - Object o = optionField.get(clazz); - DEFAULT_OPTIONS = (Options) o; - } - } - return DEFAULT_OPTIONS; - } -} diff --git a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/serializer/ProtostuffSerializeUtil.java b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/serializer/ProtostuffSerializeUtil.java index 6743e66a850..c377a7c4dfb 100644 --- a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/serializer/ProtostuffSerializeUtil.java +++ b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/serializer/ProtostuffSerializeUtil.java @@ -61,8 +61,11 @@ public static String serialize(T obj) { public static T deserialize(String str, Class clazz) { Schema schema = getSchema(clazz); - T obj = schema.newMessage(); - ProtostuffIOUtil.mergeFrom(toByteArray(str), obj, schema); + T obj = null; + if (schema != null) { + obj = schema.newMessage(); + 
ProtostuffIOUtil.mergeFrom(toByteArray(str), obj, schema); + } return obj; } @@ -93,7 +96,7 @@ public static byte[] toByteArray(String hexString) { for (int i = 0; i < byteArray.length; i++) { byte high = (byte) (Character.digit(hexString.charAt(k), 16) & 0xff); byte low = (byte) (Character.digit(hexString.charAt(k + 1), 16) & 0xff); - byteArray[i] = (byte) (high << 4 | low); + byteArray[i] = (byte) (high << 4 | low); // NOSONAR k += 2; } return byteArray; diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/MessageReceiver.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/MessageReceiver.scala index 85beb877329..daea47106b8 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/MessageReceiver.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/MessageReceiver.scala @@ -40,6 +40,10 @@ class MessageReceiver extends Receiver with Logging { logger.info("From caller {} get sync message", RPCUtils.getServiceInstanceFromSender(sender)) message match { case requestProtocol: RequestProtocol => + logger.info( + "With message requestProtocol class name:{}", + requestProtocol.getClass.getSimpleName + ) val methodExecuteWrapper = receiverMethodSearcher.getMethodExecuteWrappers(requestProtocol) messageExecutor.execute(requestProtocol, methodExecuteWrapper, sender) diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCSpringBeanCache.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCSpringBeanCache.scala index aa92605f9b8..00fa019d990 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCSpringBeanCache.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/RPCSpringBeanCache.scala @@ -19,7 +19,7 @@ package org.apache.linkis.rpc import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.utils.Logging -import org.apache.linkis.rpc.interceptor.{RPCInterceptor, RPCLoadBalancer, 
RPCServerLoader} +import org.apache.linkis.rpc.interceptor.{RPCInterceptor, RPCServerLoader} import org.apache.linkis.rpc.interceptor.common.BroadcastSenderBuilder import java.util @@ -30,7 +30,6 @@ private[rpc] object RPCSpringBeanCache extends Logging { import DataWorkCloudApplication.getApplicationContext private var beanNameToReceivers: util.Map[String, Receiver] = _ private var rpcInterceptors: Array[RPCInterceptor] = _ - private var rpcLoadBalancers: Array[RPCLoadBalancer] = _ private var rpcServerLoader: RPCServerLoader = _ private var senderBuilders: Array[BroadcastSenderBuilder] = _ private var rpcReceiveRestful: RPCReceiveRestful = _ @@ -83,18 +82,6 @@ private[rpc] object RPCSpringBeanCache extends Logging { rpcInterceptors } - private[rpc] def getRPCLoadBalancers: Array[RPCLoadBalancer] = { - if (rpcLoadBalancers == null) { - rpcLoadBalancers = getApplicationContext - .getBeansOfType(classOf[RPCLoadBalancer]) - .asScala - .map(_._2) - .toArray - .sortBy(_.order) - } - rpcLoadBalancers - } - private[rpc] def getRPCServerLoader: RPCServerLoader = { if (rpcServerLoader == null) { rpcServerLoader = getApplicationContext.getBean(classOf[RPCServerLoader]) diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala index 32f12da2731..dd52687f714 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala @@ -65,14 +65,6 @@ object RPCConfiguration { "cs,contextservice,data-source-manager,metadataQuery,metadatamanager,query,jobhistory,application,configuration,filesystem,udf,variable,microservice,errorcode,bml,datasource,basedata-manager" ).getValue.split(",") - val METADATAQUERY_SERVICE_APPLICATION_NAME: CommonVars[String] = - CommonVars("wds.linkis.gateway.conf.publicservice.name", 
"linkis-ps-metadataquery") - - val METADATAQUERY_SERVICE_LIST: Array[String] = CommonVars( - "wds.linkis.gateway.conf.metadataquery.list", - "metadatamanager,metadataquery" - ).getValue.split(",") - val LINKIS_MANAGER_SERVICE_NAME: CommonVars[String] = CommonVars("wds.linkis.gateway.conf.linkismanager.name", "linkis-cg-linkismanager") @@ -80,6 +72,16 @@ object RPCConfiguration { CommonVars("wds.linkis.gateway.conf.linkismanager.list", "linkisManager,engineplugin").getValue .split(",") + val LINKIS_DATASOURCE_SERVICE_NAME: CommonVars[String] = + CommonVars("linkis.gateway.conf.linkisdatasource.name", "linkis-ps-datasource") + + val LINKIS_DATASOURCE_SERVICE_LIST: Array[String] = + CommonVars( + "linkis.gateway.conf.linkisdatasource.list", + "data-source-manager,metadataquery,datasource,metadataQuery,metadatamanager" + ).getValue + .split(",") + val BDP_RPC_INSTANCE_ALIAS_SERVICE_REFRESH_INTERVAL: CommonVars[TimeType] = CommonVars("wds.linkis.rpc.instancealias.refresh.interval", new TimeType("3s")) @@ -95,13 +97,17 @@ object RPCConfiguration { val SERVICE_SCAN_PACKAGE: String = CommonVars("wds.linkis.ms.service.scan.package", "org.apache.linkis").getValue - val ENABLE_SPRING_PARAMS: Boolean = - CommonVars("wds.linkis.rpc.spring.params.enable", false).getValue - // unit is HOUR val SENDER_CACHE_CLEANING_HOUR = CommonVars("linkis.rpc.sender.cache.cleaning.time.hour", 6).getValue + // unit is HOUR + val RPC_RETRY_NUMBER = + CommonVars("linkis.rpc.retry.number", 5).getValue + + val RPC_RETRY_PERIOD = + CommonVars[Long]("linkis.rpc.retry.period", 30000L).getValue + val REFLECTIONS = new Reflections( SERVICE_SCAN_PACKAGE, new MethodAnnotationsScanner(), @@ -112,6 +118,15 @@ object RPCConfiguration { val BDP_RPC_CACHE_CONF_EXPIRE_TIME: CommonVars[Long] = CommonVars("wds.linkis.rpc.cache.expire.time", 120000L) + val ENABLE_SPRING_PARAMS: Boolean = + CommonVars("wds.linkis.rpc.spring.params.enable", false).getValue + + val RPC_READ_TIME_OUT: Int = + 
CommonVars[Int]("spring.ribbon.ReadTimeout", 100000).getValue + + val RPC_CONNECT_TIME_OUT: Int = + CommonVars[Int]("spring.ribbon.ConnectTimeout", 100000).getValue + val CONTEXT_SERVICE_REQUEST_PREFIX = "contextservice" val CONTEXT_SERVICE_NAME: String = @@ -124,4 +139,17 @@ object RPCConfiguration { CONTEXT_SERVICE_APPLICATION_NAME.getValue } + val configOptions: feign.Request.Options = + new feign.Request.Options(RPC_CONNECT_TIME_OUT, RPC_READ_TIME_OUT, true) + + val RPC_OBJECT_PREFIX_WHITE_LIST: Array[String] = + CommonVars( + "wds.linkis.rpc.object.class.prefix.whitelist", + "org.apache.linkis,com.webank.wedatasphere,com.wedatasphere" + ).getValue + .split(",") + + val ENABLE_RPC_OBJECT_PREFIX_WHITE_LIST_CHECK: Boolean = + CommonVars("wds.linkis.rpc.object.class.prefix.whitelist.check.enable", true).getValue + } diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCLoadBalancer.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCLoadBalancer.scala deleted file mode 100644 index 7ae331265b8..00000000000 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCLoadBalancer.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.rpc.interceptor - -import org.apache.linkis.common.ServiceInstance -import org.apache.linkis.protocol.Protocol - -import com.netflix.loadbalancer.ILoadBalancer - -trait RPCLoadBalancer { - - val order: Int - - def choose( - protocol: Protocol, - originService: ServiceInstance, - lb: ILoadBalancer - ): Option[ServiceInstance] - -} diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala index 8cab6d7d0fa..a210a068e1d 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/RPCServerLoader.scala @@ -28,16 +28,11 @@ import java.text.MessageFormat import scala.collection.JavaConverters._ import scala.concurrent.duration.Duration -import com.netflix.loadbalancer.{DynamicServerListLoadBalancer, ILoadBalancer, Server} - trait RPCServerLoader { @throws[NoInstanceExistsException] def getOrRefreshServiceInstance(serviceInstance: ServiceInstance): Unit - @throws[NoInstanceExistsException] - def getServer(lb: ILoadBalancer, serviceInstance: ServiceInstance): Server - def getServiceInstances(applicationName: String): Array[ServiceInstance] } @@ -50,14 +45,6 @@ abstract class AbstractRPCServerLoader extends RPCServerLoader with Logging { def refreshAllServers(): Unit - protected def refreshServerList(lb: ILoadBalancer): Unit = { - refreshAllServers() - lb match { - case d: DynamicServerListLoadBalancer[_] => d.updateListOfServers() - case _ => - } - } - private def getOrRefresh( refresh: => Unit, refreshed: => Boolean, @@ -101,15 +88,6 @@ abstract class AbstractRPCServerLoader extends RPCServerLoader with Logging { serviceInstance ) - override def 
getServer(lb: ILoadBalancer, serviceInstance: ServiceInstance): Server = { - getOrRefresh( - refreshServerList(lb), - lb.getAllServers.asScala.exists(_.getHostPort == serviceInstance.getInstance), - serviceInstance - ) - lb.getAllServers.asScala.find(_.getHostPort == serviceInstance.getInstance).get - } - def getDWCServiceInstance(serviceInstance: SpringCloudServiceInstance): ServiceInstance override def getServiceInstances(applicationName: String): Array[ServiceInstance] = diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/InstanceRPCLoadBalancer.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/InstanceRPCLoadBalancer.scala deleted file mode 100644 index 6cdac0df9ff..00000000000 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/InstanceRPCLoadBalancer.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.rpc.interceptor.common - -import org.apache.linkis.common.ServiceInstance -import org.apache.linkis.protocol.{InstanceProtocol, Protocol} -import org.apache.linkis.rpc.interceptor.RPCLoadBalancer - -import org.springframework.stereotype.Component - -import com.netflix.loadbalancer.ILoadBalancer - -@Component -class InstanceRPCLoadBalancer extends RPCLoadBalancer { - override val order: Int = 10 - - override def choose( - protocol: Protocol, - originService: ServiceInstance, - lb: ILoadBalancer - ): Option[ServiceInstance] = protocol match { - case instance: InstanceProtocol => - instance.choseInstance.map(ServiceInstance(originService.getApplicationName, _)) - case _ => None - } - -} diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/RetryableRPCInterceptor.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/RetryableRPCInterceptor.scala index 4faeaa180e4..6c6e9592f18 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/RetryableRPCInterceptor.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/RetryableRPCInterceptor.scala @@ -21,6 +21,7 @@ import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.exception.LinkisRetryException import org.apache.linkis.common.utils.RetryHandler import org.apache.linkis.protocol.RetryableProtocol +import org.apache.linkis.rpc.conf.RPCConfiguration import org.apache.linkis.rpc.exception.DWCRPCRetryException import org.apache.linkis.rpc.interceptor.{ RPCInterceptor, @@ -34,7 +35,7 @@ import org.apache.commons.lang3.StringUtils import org.springframework.stereotype.Component -import java.net.ConnectException +import java.net.{ConnectException, SocketTimeoutException} import feign.RetryableException @@ -67,12 +68,13 @@ class RetryableRPCInterceptor extends RPCInterceptor { class RPCRetryHandler extends RetryHandler { 
addRetryException(classOf[ConnectException]) addRetryException(classOf[RetryableException]) + addRetryException(classOf[SocketTimeoutException]) private var serviceInstance: Option[ServiceInstance] = None def setRetryInfo(retry: RetryableProtocol, chain: RPCInterceptorChain): Unit = { - setRetryNum(retry.retryNum) - setRetryPeriod(retry.period) - setRetryMaxPeriod(retry.maxPeriod) + setRetryNum(RPCConfiguration.RPC_RETRY_NUMBER) + setRetryPeriod(RPCConfiguration.RPC_RETRY_PERIOD) + setRetryMaxPeriod(RPCConfiguration.RPC_RETRY_PERIOD * 2) retry.retryExceptions.foreach(addRetryException) chain match { case s: ServiceInstanceRPCInterceptorChain => diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/SingleInstanceRPCLoadBalancer.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/SingleInstanceRPCLoadBalancer.scala deleted file mode 100644 index b007838ea6d..00000000000 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/interceptor/common/SingleInstanceRPCLoadBalancer.scala +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.rpc.interceptor.common - -import org.apache.linkis.common.ServiceInstance -import org.apache.linkis.common.utils.Logging -import org.apache.linkis.protocol.{Protocol, SingleInstanceProtocol} -import org.apache.linkis.rpc.interceptor.RPCLoadBalancer - -import org.apache.commons.lang3.StringUtils - -import org.springframework.stereotype.Component - -import com.netflix.loadbalancer.ILoadBalancer - -@Component -class SingleInstanceRPCLoadBalancer extends RPCLoadBalancer with Logging { - override val order: Int = 20 - - override def choose( - protocol: Protocol, - originService: ServiceInstance, - lb: ILoadBalancer - ): Option[ServiceInstance] = protocol match { - case _: SingleInstanceProtocol => - if (StringUtils.isEmpty(originService.getInstance)) synchronized { - if (StringUtils.isEmpty(originService.getInstance)) { - val servers = lb.getAllServers - val server = servers.get((math.random * servers.size()).toInt) - originService.setInstance(server.getHostPort) - logger.warn( - originService.getApplicationName + " choose " + server.getHostPort + " to build a single instance connection." 
- ) - } - } - Some(originService) - case _ => None - } - -} diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringCloudFeignConfigurationCache.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringCloudFeignConfigurationCache.scala index 06f13c70a9e..b8b41524d5e 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringCloudFeignConfigurationCache.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringCloudFeignConfigurationCache.scala @@ -24,7 +24,7 @@ import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.autoconfigure.AutoConfigureBefore import org.springframework.cloud.client.discovery.DiscoveryClient import org.springframework.cloud.client.loadbalancer.LoadBalancedRetryFactory -import org.springframework.cloud.netflix.ribbon.SpringClientFactory +import org.springframework.cloud.loadbalancer.support.LoadBalancerClientFactory import org.springframework.cloud.openfeign.FeignClientsConfiguration import org.springframework.context.annotation.{Configuration, Import} @@ -48,7 +48,7 @@ class SpringCloudFeignConfigurationCache( private var discoveryClient: DiscoveryClient = _ @Autowired - private var clientFactory: SpringClientFactory = _ + private var loadBalancerClientFactory: LoadBalancerClientFactory = _ @Autowired(required = false) private var loadBalancedRetryFactory: LoadBalancedRetryFactory = _ @@ -56,7 +56,7 @@ class SpringCloudFeignConfigurationCache( @PostConstruct def storeFeignConfiguration(): Unit = { SpringCloudFeignConfigurationCache.client = client - SpringCloudFeignConfigurationCache.clientFactory = clientFactory + SpringCloudFeignConfigurationCache.loadBalancerClientFactory = loadBalancerClientFactory SpringCloudFeignConfigurationCache.loadBalancedRetryFactory = loadBalancedRetryFactory SpringCloudFeignConfigurationCache.contract = contract SpringCloudFeignConfigurationCache.decoder = 
decoder @@ -71,7 +71,9 @@ private[linkis] object SpringCloudFeignConfigurationCache { private[SpringCloudFeignConfigurationCache] var decoder: Decoder = _ private[SpringCloudFeignConfigurationCache] var contract: Contract = _ private[SpringCloudFeignConfigurationCache] var client: Client = _ - private[SpringCloudFeignConfigurationCache] var clientFactory: SpringClientFactory = _ + + private[SpringCloudFeignConfigurationCache] var loadBalancerClientFactory + : LoadBalancerClientFactory = _ private[SpringCloudFeignConfigurationCache] var loadBalancedRetryFactory : LoadBalancedRetryFactory = _ @@ -92,7 +94,7 @@ private[linkis] object SpringCloudFeignConfigurationCache { client } - private[rpc] def getClientFactory = clientFactory + private[rpc] def getLoadloadBalancerClientFactory = loadBalancerClientFactory private[rpc] def getLoadBalancedRetryFactory = loadBalancedRetryFactory private[linkis] def getDiscoveryClient = { diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringMVCRPCSender.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringMVCRPCSender.scala index ab4f2d7fe30..9bb2fdea964 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringMVCRPCSender.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/sender/SpringMVCRPCSender.scala @@ -18,38 +18,20 @@ package org.apache.linkis.rpc.sender import org.apache.linkis.common.ServiceInstance -import org.apache.linkis.common.conf.{Configuration => DWCConfiguration} -import org.apache.linkis.protocol.Protocol +import org.apache.linkis.common.utils.Logging import org.apache.linkis.rpc.{BaseRPCSender, RPCMessageEvent, RPCSpringBeanCache} import org.apache.linkis.rpc.conf.RPCConfiguration -import org.apache.linkis.rpc.interceptor.{ - RPCInterceptor, - RPCLoadBalancer, - ServiceInstanceRPCInterceptorChain -} -import org.apache.linkis.rpc.message.utils.LoadBalancerOptionsUtils -import 
org.apache.linkis.rpc.transform.RPCConsumer -import org.apache.linkis.server.{BDPJettyServerHelper, Message} +import org.apache.linkis.rpc.interceptor.{RPCInterceptor, ServiceInstanceRPCInterceptorChain} +import org.apache.linkis.server.conf.ServerConfiguration import org.apache.commons.lang3.StringUtils -import org.springframework.cloud.netflix.ribbon.ServerIntrospector -import org.springframework.cloud.openfeign.ribbon.{ - CachingSpringLoadBalancerFactory, - FeignLoadBalancer, - LoadBalancerFeignClient -} - -import java.lang.reflect.Field - -import com.netflix.client.ClientRequest -import com.netflix.client.config.IClientConfig -import com.netflix.loadbalancer.reactive.LoadBalancerCommand import feign._ private[rpc] class SpringMVCRPCSender private[rpc] ( private[rpc] val serviceInstance: ServiceInstance -) extends BaseRPCSender(serviceInstance.getApplicationName) { +) extends BaseRPCSender(serviceInstance.getApplicationName) + with Logging { import SpringCloudFeignConfigurationCache._ @@ -59,77 +41,40 @@ private[rpc] class SpringMVCRPCSender private[rpc] ( override protected def createRPCInterceptorChain() = new ServiceInstanceRPCInterceptorChain(0, getRPCInterceptors, serviceInstance) - protected def getRPCLoadBalancers: Array[RPCLoadBalancer] = - RPCSpringBeanCache.getRPCLoadBalancers - + /** + * If it's a random call, you don't need to set target specify instance,need to specify target and + * do not set client setting + * @param builder + */ override protected def doBuilder(builder: Feign.Builder): Unit = { - val client = getClient.asInstanceOf[LoadBalancerFeignClient] - val newClient = new LoadBalancerFeignClient( - client.getDelegate, - new CachingSpringLoadBalancerFactory(getClientFactory) { - override def create(clientName: String): FeignLoadBalancer = { - val serverIntrospector = - getClientFactory.getInstance(clientName, classOf[ServerIntrospector]) - new FeignLoadBalancer( - getClientFactory.getLoadBalancer(clientName), - 
getClientFactory.getClientConfig(clientName), - serverIntrospector - ) { - override def customizeLoadBalancerCommandBuilder( - request: FeignLoadBalancer.RibbonRequest, - config: IClientConfig, - builder: LoadBalancerCommand.Builder[FeignLoadBalancer.RibbonResponse] - ): Unit = { - val instance = - if (getRPCLoadBalancers.isEmpty) None - else { - val requestBody = SpringMVCRPCSender.getRequest(request).body() - val requestStr = new String(requestBody, DWCConfiguration.BDP_ENCODING.getValue) - val obj = RPCConsumer.getRPCConsumer.toObject( - BDPJettyServerHelper.gson.fromJson(requestStr, classOf[Message]) - ) - obj match { - case protocol: Protocol => - var serviceInstance: Option[ServiceInstance] = None - for (lb <- getRPCLoadBalancers if serviceInstance.isEmpty) - serviceInstance = lb.choose( - protocol, - SpringMVCRPCSender.this.serviceInstance, - getLoadBalancer - ) - serviceInstance.foreach(f => - logger.info( - "origin serviceInstance: " + SpringMVCRPCSender.this.serviceInstance + ", chose serviceInstance: " + f - ) - ) // TODO just for test - serviceInstance - case _ => None - } - } - instance - .orElse(Option(SpringMVCRPCSender.this.serviceInstance)) - .filter(s => StringUtils.isNotBlank(s.getInstance)) - .foreach { serviceInstance => - val server = RPCSpringBeanCache.getRPCServerLoader - .getServer(getLoadBalancer, serviceInstance) - builder.withServer(server) - } - } - } + if (serviceInstance != null && StringUtils.isNotBlank(serviceInstance.getInstance)) { + builder.requestInterceptor(new RequestInterceptor() { + def apply(template: RequestTemplate): Unit = { + template.target( + s"http://${serviceInstance.getInstance}${ServerConfiguration.BDP_SERVER_RESTFUL_URI.getValue}" + ) } - }, - getClientFactory - ) - if (RPCConfiguration.ENABLE_SPRING_PARAMS) { - builder.options(LoadBalancerOptionsUtils.getDefaultOptions) + }) } super.doBuilder(builder) - builder - .contract(getContract) - .encoder(getEncoder) - .decoder(getDecoder) - .client(newClient) - 
.requestInterceptor(getRPCTicketIdRequestInterceptor) + if (RPCConfiguration.ENABLE_SPRING_PARAMS) { + builder.options(RPCConfiguration.configOptions) + } + if (StringUtils.isBlank(serviceInstance.getInstance)) { + builder + .contract(getContract) + .encoder(getEncoder) + .decoder(getDecoder) + .client(getClient) + .requestInterceptor(getRPCTicketIdRequestInterceptor) + } else { + builder + .contract(getContract) + .encoder(getEncoder) + .decoder(getDecoder) + .requestInterceptor(getRPCTicketIdRequestInterceptor) + } + } /** @@ -160,18 +105,3 @@ private[rpc] class SpringMVCRPCSender private[rpc] ( } else s"RPCSender($getApplicationName, ${serviceInstance.getInstance})" } - -private object SpringMVCRPCSender { - private var requestField: Field = _ - - def getRequest(req: ClientRequest): Request = { - if (requestField == null) synchronized { - if (requestField == null) { - requestField = req.getClass.getDeclaredField("request") - requestField.setAccessible(true) - } - } - requestField.get(req).asInstanceOf[Request] - } - -} diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/JavaCollectionSerializer.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/JavaCollectionSerializer.scala deleted file mode 100644 index 5c9e1636894..00000000000 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/JavaCollectionSerializer.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.rpc.transform - -import org.apache.linkis.server.BDPJettyServerHelper - -import org.json4s.{CustomSerializer, JArray, JObject} -import org.json4s.jackson.JsonMethods.parse -import org.json4s.jackson.Serialization.write - -// TODO is now only the simplest implementation, and there is a need to optimize it later.(TODO 现在只做最简单的实现,后续有需要再优化) - -object JavaCollectionSerializer - extends CustomSerializer[java.util.List[_]](implicit formats => - ( - { case j: JArray => - BDPJettyServerHelper.gson.fromJson(write(j), classOf[java.util.List[_]]) - }, - { case list: java.util.List[_] => - parse(BDPJettyServerHelper.gson.toJson(list)) - } - ) - ) - -object JavaMapSerializer - extends CustomSerializer[java.util.Map[_, _]](implicit formats => - ( - { case j: JObject => - BDPJettyServerHelper.gson.fromJson(write(j), classOf[java.util.Map[_, _]]) - }, - { case map: java.util.Map[_, _] => - parse(BDPJettyServerHelper.gson.toJson(map)) - } - ) - ) diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCConsumer.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCConsumer.scala index a77ff9f71f4..85e956fe04b 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCConsumer.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCConsumer.scala @@ -19,6 +19,8 @@ package org.apache.linkis.rpc.transform import org.apache.linkis.common.exception.ExceptionManager import org.apache.linkis.common.utils.Utils +import 
org.apache.linkis.rpc.conf.RPCConfiguration +import org.apache.linkis.rpc.errorcode.LinkisRpcErrorCodeSummary.CORRESPONDING_CLASS_ILLEGAL import org.apache.linkis.rpc.errorcode.LinkisRpcErrorCodeSummary.CORRESPONDING_NOT_FOUND import org.apache.linkis.rpc.errorcode.LinkisRpcErrorCodeSummary.CORRESPONDING_TO_INITIALIZE import org.apache.linkis.rpc.exception.DWCURIException @@ -50,7 +52,19 @@ private[linkis] object RPCConsumer { if (data.isEmpty) return BoxedUnit.UNIT val objectStr = data.get(OBJECT_VALUE).toString val objectClass = data.get(CLASS_VALUE).toString + val isRequestProtocol = data.get(IS_REQUEST_PROTOCOL_CLASS).toString logger.debug("The corresponding anti-sequence is class {}", objectClass) + if ( + RPCConfiguration.ENABLE_RPC_OBJECT_PREFIX_WHITE_LIST_CHECK && "true".equals( + isRequestProtocol + ) && !RPCConfiguration.RPC_OBJECT_PREFIX_WHITE_LIST + .exists(prefix => objectClass.startsWith(prefix)) + ) { + throw new DWCURIException( + CORRESPONDING_CLASS_ILLEGAL.getErrorCode, + MessageFormat.format(CORRESPONDING_CLASS_ILLEGAL.getErrorDesc, objectClass) + ) + } val clazz = Utils.tryThrow(Class.forName(objectClass)) { case _: ClassNotFoundException => new DWCURIException( diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCFormats.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCFormats.scala deleted file mode 100644 index 4e94584a37a..00000000000 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCFormats.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.rpc.transform - -import org.json4s.Serializer - -trait RPCFormats { - - def getSerializers: Array[Serializer[_]] - -} diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCProduct.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCProduct.scala index a2b9ae95ec1..afb4d17ea52 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCProduct.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/transform/RPCProduct.scala @@ -17,7 +17,6 @@ package org.apache.linkis.rpc.transform -import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.utils.Logging import org.apache.linkis.protocol.message.RequestProtocol import org.apache.linkis.rpc.errorcode.LinkisRpcErrorCodeSummary @@ -26,15 +25,6 @@ import org.apache.linkis.rpc.exception.DWCURIException import org.apache.linkis.rpc.serializer.ProtostuffSerializeUtil import org.apache.linkis.server.{EXCEPTION_MSG, Message} -import org.apache.commons.lang3.ClassUtils - -import java.lang.reflect.{ParameterizedType, Type} -import java.util - -import scala.collection.JavaConverters.mapAsScalaMapConverter - -import org.json4s.{DefaultFormats, Formats, Serializer} - private[linkis] trait RPCProduct { def toMessage(t: Any): Message @@ -48,25 +38,11 @@ private[linkis] trait RPCProduct { private[linkis] object RPCProduct extends Logging { private[rpc] val IS_REQUEST_PROTOCOL_CLASS = "rpc_is_request_protocol" - private[rpc] val IS_SCALA_CLASS = 
"rpc_is_scala_class" private[rpc] val CLASS_VALUE = "rpc_object_class" private[rpc] val OBJECT_VALUE = "rpc_object_value" - private[rpc] implicit var formats: Formats = - DefaultFormats + JavaCollectionSerializer + JavaMapSerializer - - private var serializerClasses: List[Class[_]] = List.empty - private val rpcProduct: RPCProduct = new RPCProduct { - private val rpcFormats = - DataWorkCloudApplication.getApplicationContext.getBeansOfType(classOf[RPCFormats]) - - if (rpcFormats != null && !rpcFormats.isEmpty) { - val serializers = rpcFormats.asScala.map(_._2.getSerializers).toArray.flatMap(_.iterator) - setFormats(serializers) - } - override def toMessage(t: Any): Message = { if (t == null) { throw new DWCURIException( @@ -105,38 +81,7 @@ private[linkis] object RPCProduct extends Logging { } - private[rpc] def setFormats(serializer: Array[Serializer[_]]): Unit = { - this.formats = (serializer :+ JavaCollectionSerializer :+ JavaMapSerializer).foldLeft( - DefaultFormats.asInstanceOf[Formats] - )(_ + _) - serializerClasses = formats.customSerializers - .map(s => getActualTypeClass(s.getClass.getGenericSuperclass)) - .filter(_ != null) ++: List(classOf[util.List[_]], classOf[util.Map[_, _]]) - logger.info( - "RPC Serializers: " + this.formats.customSerializers - .map(_.getClass.getSimpleName) + ", serializerClasses: " + - "" + serializerClasses - ) - } - - private def getActualTypeClass(classType: Type): Class[_] = classType match { - case p: ParameterizedType => - val params = p.getActualTypeArguments - if (params == null || params.isEmpty) null - else getActualTypeClass(params(0)) - case c: Class[_] => c - case _ => null - } - private[rpc] def isRequestProtocol(obj: Any): Boolean = obj.isInstanceOf[RequestProtocol] - private[rpc] def isScalaClass(obj: Any): Boolean = - (obj.isInstanceOf[Product] && obj.isInstanceOf[Serializable]) || - serializerClasses.exists(ClassUtils.isAssignable(obj.getClass, _)) || - obj.getClass.getName.startsWith("scala.") - - private[rpc] def 
getSerializableScalaClass(clazz: Class[_]): Class[_] = - serializerClasses.find(ClassUtils.isAssignable(clazz, _)).getOrElse(clazz) - def getRPCProduct: RPCProduct = rpcProduct } diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala index e7d48305acb..5ee3b1ca487 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/utils/RPCUtils.scala @@ -25,13 +25,14 @@ import org.apache.linkis.rpc.sender.{SpringCloudFeignConfigurationCache, SpringM import org.apache.commons.lang3.StringUtils +import org.springframework.cloud.client.loadbalancer.RetryableStatusCodeException + import java.lang.reflect.UndeclaredThrowableException import java.net.ConnectException import java.util.Locale import scala.collection.JavaConverters._ -import com.netflix.client.ClientException import feign.RetryableException object RPCUtils { @@ -53,11 +54,10 @@ object RPCUtils { } case t: RuntimeException => t.getCause match { - case client: ClientException => - StringUtils.isNotBlank(client.getErrorMessage) && - client.getErrorMessage.contains( - "Load balancer does not have available server for client" - ) + // case client: ClientException => + case client: RetryableStatusCodeException => + StringUtils.isNotBlank(client.getMessage) && + client.getMessage.contains("Load balancer does not have available server for client") case _ => false } case _ => false diff --git a/linkis-commons/linkis-rpc/src/test/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtilsTest.java b/linkis-commons/linkis-rpc/src/test/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtilsTest.java deleted file mode 100644 index d265371d60d..00000000000 --- a/linkis-commons/linkis-rpc/src/test/java/org/apache/linkis/rpc/message/utils/LoadBalancerOptionsUtilsTest.java +++ 
/dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.rpc.message.utils; - -import feign.Request; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class LoadBalancerOptionsUtilsTest { - - @Test - @DisplayName("getDefaultOptionsTest") - public void getDefaultOptionsTest() throws NoSuchFieldException, IllegalAccessException { - - Request.Options defaultOptions = LoadBalancerOptionsUtils.getDefaultOptions(); - Assertions.assertNotNull(defaultOptions); - } -} diff --git a/linkis-commons/linkis-rpc/src/test/scala/org/apache/linkis/rpc/RPCFormatsTest.scala b/linkis-commons/linkis-rpc/src/test/scala/org/apache/linkis/rpc/RPCFormatsTest.scala deleted file mode 100644 index fbaf1b9b228..00000000000 --- a/linkis-commons/linkis-rpc/src/test/scala/org/apache/linkis/rpc/RPCFormatsTest.scala +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.rpc - -import org.apache.linkis.rpc.transform.{JavaCollectionSerializer, JavaMapSerializer} - -import org.apache.commons.lang3.ClassUtils - -import java.lang.reflect.ParameterizedType -import java.util - -import org.json4s.{CustomSerializer, DefaultFormats, Extraction} -import org.json4s.JsonAST.JObject -import org.json4s.JsonDSL._ -import org.json4s.jackson.Serialization -import org.json4s.reflect.ManifestFactory - -object RPCFormatsTest { - - trait ResultResource - class AvailableResource(val ticketId: String) extends ResultResource - - object ResultResourceSerializer - extends CustomSerializer[ResultResource](implicit formats => - ( - { case JObject(List(("AvailableResource", JObject(List(("ticketId", ticketId)))))) => - new AvailableResource(ticketId.extract[String]) - }, - { case r: AvailableResource => - ("AvailableResource", ("ticketId", Extraction.decompose(r.ticketId))) - } - ) - ) - - def testRPC1(args: Array[String]): Unit = { - implicit val formats = DefaultFormats + ResultResourceSerializer - val serializerClasses = formats.customSerializers - .map(_.getClass.getGenericSuperclass match { - case p: ParameterizedType => - val params = p.getActualTypeArguments - if (params == null || params.isEmpty) null - else params(0).asInstanceOf[Class[_]] - }) - .filter(_ != null) - val a = new AvailableResource("aaa") - val str = Serialization.write(a) - 
println(str) - val clazz = classOf[AvailableResource] - println(serializerClasses) - val realClass1 = serializerClasses.find(ClassUtils.isAssignable(clazz, _)) - println(realClass1) - val realClass = realClass1.getOrElse(clazz) - val obj = Serialization.read(str)(formats, ManifestFactory.manifestOf(realClass)) - println(obj) - println(classOf[Array[_]].getClass.getName) - } - - case class TestCollection1(a: String, list: java.util.List[String]) - case class TestCollection2(a: String, list: java.util.Map[String, Integer]) - - def testRPC2(args: Array[String]): Unit = { - implicit val formats = DefaultFormats + JavaCollectionSerializer + JavaMapSerializer - // val a = TestCollection1("1", new util.ArrayList[String]()) - val a = TestCollection2("1", new util.HashMap[String, Integer]()) - // a.list.add("1111") - a.list.put("1111", 2) - val str = Serialization.write(a) - println(str) - val realClass = classOf[TestCollection2] - val obj = Serialization.read(str)(formats, ManifestFactory.manifestOf(realClass)) - println(obj) - } - - def main(args: Array[String]): Unit = { - testRPC2(args) - } - -} diff --git a/linkis-commons/linkis-rpc/src/test/scala/org/apache/linkis/rpc/conf/RPCConfigurationTest.scala b/linkis-commons/linkis-rpc/src/test/scala/org/apache/linkis/rpc/conf/RPCConfigurationTest.scala index 5aaadd133c0..ed1d39ce8ce 100644 --- a/linkis-commons/linkis-rpc/src/test/scala/org/apache/linkis/rpc/conf/RPCConfigurationTest.scala +++ b/linkis-commons/linkis-rpc/src/test/scala/org/apache/linkis/rpc/conf/RPCConfigurationTest.scala @@ -37,7 +37,6 @@ class RPCConfigurationTest { val enablepublicservice = RPCConfiguration.ENABLE_PUBLIC_SERVICE.getValue val publicserviceapplicationname = RPCConfiguration.PUBLIC_SERVICE_APPLICATION_NAME.getValue val publicservicelist = RPCConfiguration.PUBLIC_SERVICE_LIST - val metadataqueryservicelist = RPCConfiguration.METADATAQUERY_SERVICE_LIST Assertions.assertTrue(25 == bdprpcbroadcastthreadsize.intValue()) Assertions.assertTrue(400 == 
bdprpcreceiverasynconsumerthreadmax.intValue()) @@ -48,7 +47,6 @@ class RPCConfigurationTest { Assertions.assertTrue(enablepublicservice) Assertions.assertEquals("linkis-ps-publicservice", publicserviceapplicationname) Assertions.assertTrue(publicservicelist.size > 0) - Assertions.assertTrue(metadataqueryservicelist.size > 0) } diff --git a/linkis-commons/linkis-scheduler/src/main/java/org/apache/linkis/scheduler/util/SchedulerUtils.java b/linkis-commons/linkis-scheduler/src/main/java/org/apache/linkis/scheduler/util/SchedulerUtils.java new file mode 100644 index 00000000000..62191aa20e4 --- /dev/null +++ b/linkis-commons/linkis-scheduler/src/main/java/org/apache/linkis/scheduler/util/SchedulerUtils.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.scheduler.util; + +import org.apache.linkis.scheduler.conf.SchedulerConfiguration; + +import org.apache.commons.lang3.StringUtils; + +public class SchedulerUtils { + private static final String EVENT_ID_SPLIT = "_"; + private static final String ALL_CREATORS = "ALL_CREATORS"; + private static final String SPACIAL_USER_SPLIT = "_v_"; + + /** + * support priority queue with config username or creator + * + * @param groupName + * @return + */ + public static boolean isSupportPriority(String groupName) { + String users = SchedulerConfiguration.SUPPORT_PRIORITY_TASK_USERS(); + if (StringUtils.isEmpty(users)) { + return false; + } + String userName = getUserFromGroupName(groupName); + if (StringUtils.isEmpty(userName)) { + return false; + } + String creators = SchedulerConfiguration.SUPPORT_PRIORITY_TASK_CREATORS(); + creators = creators.toLowerCase(); + users = users.toLowerCase(); + if (ALL_CREATORS.equalsIgnoreCase(creators)) { + return users.contains(userName.toLowerCase()); + } else { + String creatorName = getCreatorFromGroupName(groupName); + return users.contains(userName.toLowerCase()) && creators.contains(creatorName.toLowerCase()); + } + } + + public static String getUserFromGroupName(String groupName) { + if (groupName.contains(SPACIAL_USER_SPLIT)) { + int vIndex = groupName.lastIndexOf(SPACIAL_USER_SPLIT); + int lastIndex = groupName.lastIndexOf(EVENT_ID_SPLIT); + String user = groupName.substring(vIndex + 1, lastIndex); + return user; + } + String[] groupNames = groupName.split(EVENT_ID_SPLIT); + String user = groupNames[groupNames.length - 2]; + return user; + } + + public static String getEngineTypeFromGroupName(String groupName) { + String[] groupNames = groupName.split(EVENT_ID_SPLIT); + String ecType = groupNames[groupNames.length - 1]; + return ecType; + } + + public static String getCreatorFromGroupName(String groupName) { + if (groupName.contains(SPACIAL_USER_SPLIT)) { + int vIndex = 
groupName.lastIndexOf(SPACIAL_USER_SPLIT); + String creatorName = groupName.substring(0, vIndex); + return creatorName; + } + int lastIndex = groupName.lastIndexOf(EVENT_ID_SPLIT); + int secondLastIndex = groupName.lastIndexOf(EVENT_ID_SPLIT, lastIndex - 1); + String creatorName = groupName.substring(0, secondLastIndex); + return creatorName; + } +} diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/AbstractScheduler.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/AbstractScheduler.scala index de2b81bcaa4..8126ac8847b 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/AbstractScheduler.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/AbstractScheduler.scala @@ -17,14 +17,15 @@ package org.apache.linkis.scheduler -import org.apache.linkis.common.utils.Utils +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.scheduler.conf.SchedulerConfiguration import org.apache.linkis.scheduler.errorcode.LinkisSchedulerErrorCodeSummary._ import org.apache.linkis.scheduler.exception.SchedulerErrorException import org.apache.linkis.scheduler.queue.SchedulerEvent import org.apache.commons.lang3.StringUtils -abstract class AbstractScheduler extends Scheduler { +abstract class AbstractScheduler extends Scheduler with Logging { override def init(): Unit = {} override def start(): Unit = {} @@ -52,6 +53,14 @@ abstract class AbstractScheduler extends Scheduler { val group = getSchedulerContext.getOrCreateGroupFactory.getOrCreateGroup(event) val consumer = getSchedulerContext.getOrCreateConsumerManager.getOrCreateConsumer(group.getGroupName) + logger.info( + s"Consumer ${consumer.getGroup.getGroupName} running size ${consumer.getRunningSize} waiting size ${consumer.getWaitingSize}" + ) + if (consumer.getWaitingSize >= SchedulerConfiguration.MAX_GROUP_ALTER_WAITING_SIZE) { + logger.warn( + s"Group waiting size exceed 
max alter waiting size ${consumer.getWaitingSize} group name ${consumer.getGroup.getGroupName}" + ) + } val index = consumer.getConsumeQueue.offer(event) index.map(getEventId(_, group.getGroupName)).foreach(event.setId) if (index.isEmpty) { diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/conf/SchedulerConfiguration.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/conf/SchedulerConfiguration.scala index 8fd6f1c6f01..69c5ab4351d 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/conf/SchedulerConfiguration.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/conf/SchedulerConfiguration.scala @@ -25,12 +25,32 @@ object SchedulerConfiguration { CommonVars("wds.linkis.fifo.consumer.auto.clear.enabled", true) val FIFO_CONSUMER_MAX_IDLE_TIME = - CommonVars("wds.linkis.fifo.consumer.max.idle.time", new TimeType("1h")).getValue.toLong + CommonVars("wds.linkis.fifo.consumer.max.idle.time", new TimeType("10m")).getValue.toLong val FIFO_CONSUMER_IDLE_SCAN_INTERVAL = - CommonVars("wds.linkis.fifo.consumer.idle.scan.interval", new TimeType("2h")) + CommonVars("wds.linkis.fifo.consumer.idle.scan.interval", new TimeType("30m")) val FIFO_CONSUMER_IDLE_SCAN_INIT_TIME = CommonVars("wds.linkis.fifo.consumer.idle.scan.init.time", new TimeType("1s")) + val MAX_GROUP_ALTER_WAITING_SIZE = + CommonVars("linkis.fifo.consumer.group.max.alter.waiting.size", 1000).getValue + + // support fifo pfifo + val FIFO_QUEUE_STRATEGY = + CommonVars("linkis.fifo.queue.strategy", "fifo").getValue + + val SUPPORT_PRIORITY_TASK_USERS = + CommonVars("linkis.fifo.queue.support.priority.users", "").getValue + + val SUPPORT_PRIORITY_TASK_CREATORS = + CommonVars("linkis.fifo.queue.support.priority.creators", "ALL_CREATORS").getValue + + val MAX_PRIORITY_QUEUE_CACHE_SIZE = + CommonVars("linkis.fifo.priority.queue.max.cache.size", 1000).getValue + + val 
ENGINE_PRIORITY_RUNTIME_KEY = "wds.linkis.engine.runtime.priority" + + val PFIFO_SCHEDULER_STRATEGY = "pfifo" + val FIFO_SCHEDULER_STRATEGY = "fifo" } diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/executer/ExecuteResponse.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/executer/ExecuteResponse.scala index bcadf99d2c1..0f0e5c481d9 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/executer/ExecuteResponse.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/executer/ExecuteResponse.scala @@ -31,6 +31,9 @@ case class AliasOutputExecuteResponse(alias: String, output: String) extends Out case class ErrorExecuteResponse(message: String, t: Throwable) extends CompletedExecuteResponse +case class ErrorRetryExecuteResponse(message: String, index: Int, t: Throwable) + extends ExecuteResponse + case class IncompleteExecuteResponse(message: String) extends ExecuteResponse case class SubmitResponse(taskId: String) extends ExecuteResponse diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/listener/JobRetryListener.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/listener/JobRetryListener.scala new file mode 100644 index 00000000000..4e95a4604b5 --- /dev/null +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/listener/JobRetryListener.scala @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.scheduler.listener + +import org.apache.linkis.scheduler.queue.Job + +import java.util + +trait JobRetryListener extends SchedulerListener { + + def onJobFailed( + job: Job, + code: String, + props: util.Map[String, AnyRef], + errorCode: Int, + errorDesc: String + ): Boolean + +} diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/ConsumeQueue.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/ConsumeQueue.scala index 14c9061777e..7761a9f33b5 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/ConsumeQueue.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/ConsumeQueue.scala @@ -21,6 +21,8 @@ abstract class ConsumeQueue { def remove(event: SchedulerEvent): Unit def getWaitingEvents: Array[SchedulerEvent] def size: Int + + def waitingSize: Int def isEmpty: Boolean def isFull: Boolean def clearAll(): Unit diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala index 50dce2ca124..539a2a4b1f6 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala @@ -38,10 +38,15 @@ abstract class Consumer(schedulerContext: SchedulerContext, executeService: Exec def 
getRunningEvents: Array[SchedulerEvent] + def getMaxRunningEvents: Int + + def getRunningSize: Int + + def getWaitingSize: Int + def start(): Unit def shutdown(): Unit = { - logger.info(s"$toString is ready to stop!") terminate = true logger.info(s"$toString stopped!") } diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/GroupFactory.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/GroupFactory.scala index f3471b07dd6..be1716f238b 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/GroupFactory.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/GroupFactory.scala @@ -19,6 +19,11 @@ package org.apache.linkis.scheduler.queue abstract class GroupFactory { + /** + * Create a Group and set the concurrency limit of the group + * @param event + * @return + */ def getOrCreateGroup(event: SchedulerEvent): Group def getGroup(groupName: String): Group diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala index 2087153813f..8034841b4ce 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala @@ -22,8 +22,9 @@ import org.apache.linkis.common.listener.ListenerEventBus import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.protocol.engine.JobProgressInfo +import org.apache.linkis.scheduler.errorcode.LinkisSchedulerErrorCodeSummary.TASK_STATUS_FLIP_ERROR import org.apache.linkis.scheduler.event._ -import org.apache.linkis.scheduler.exception.LinkisJobRetryException +import org.apache.linkis.scheduler.exception.{LinkisJobRetryException, SchedulerErrorException} import 
org.apache.linkis.scheduler.executer._ import org.apache.linkis.scheduler.future.BDPFuture import org.apache.linkis.scheduler.listener._ @@ -33,6 +34,7 @@ import org.apache.commons.lang3.StringUtils import org.apache.commons.lang3.exception.ExceptionUtils import java.io.Closeable +import java.text.MessageFormat import java.util.concurrent.Future abstract class Job extends Runnable with SchedulerEvent with Closeable with Logging { @@ -50,6 +52,7 @@ abstract class Job extends Runnable with SchedulerEvent with Closeable with Logg private var executor: Executor = _ private var jobListener: Option[JobListener] = None private var logListener: Option[LogListener] = None + private var jobRetryListener: Option[JobRetryListener] = None private var progressListener: Option[ProgressListener] = None private[linkis] var interrupt = false private var progress: Float = 0f @@ -152,6 +155,12 @@ abstract class Job extends Runnable with SchedulerEvent with Closeable with Logg def getLogListener: Option[LogListener] = logListener + def setJobRetryListener(jobRetryListener: JobRetryListener): Unit = this.jobRetryListener = Some( + jobRetryListener + ) + + def getJobRetryListener: Option[JobRetryListener] = jobRetryListener + def setProgressListener(progressListener: ProgressListener): Unit = this.progressListener = Some( progressListener ) @@ -196,23 +205,27 @@ abstract class Job extends Runnable with SchedulerEvent with Closeable with Logg ): Unit = toState match { case Inited => jobListener.foreach(_.onJobInited(this)) - // TODO Add event(加事件) case Scheduled => jobListener.foreach(_.onJobScheduled(this)) - logListener.foreach(_.onLogUpdate(this, LogUtils.generateInfo("job is scheduled."))) - // TODO Add event(加事件) case Running => jobListener.foreach(_.onJobRunning(this)) - logListener.foreach(_.onLogUpdate(this, LogUtils.generateInfo("job is running."))) - // TODO job start event case WaitForRetry => jobListener.foreach(_.onJobWaitForRetry(this)) case _ => jobDaemon.foreach(_.kill()) 
jobListener.foreach(_.onJobCompleted(this)) -// if(getJobInfo != null) logListener.foreach(_.onLogUpdate(this, getJobInfo.getMetric)) - logListener.foreach(_.onLogUpdate(this, LogUtils.generateInfo("job is completed."))) - // TODO job end event + } + + protected def transitionWaitForRetry(): Unit = { + val state: SchedulerEventState = getState + if (state != Failed && state != Running) { + throw new SchedulerErrorException( + TASK_STATUS_FLIP_ERROR.getErrorCode, + MessageFormat.format(TASK_STATUS_FLIP_ERROR.getErrorDesc, state, WaitForRetry) + ) + } + logger.info(s"$toString change status ${state} => ${WaitForRetry}.") + transition(WaitForRetry) } protected def transitionCompleted(executeCompleted: CompletedExecuteResponse): Unit = { @@ -303,6 +316,7 @@ abstract class Job extends Runnable with SchedulerEvent with Closeable with Logg } override def run(): Unit = { + Thread.currentThread().setName(s"Job_${toString}_Thread") if (!isScheduled || interrupt) return startTime = System.currentTimeMillis Utils.tryAndWarn(transition(Running)) @@ -351,6 +365,16 @@ abstract class Job extends Runnable with SchedulerEvent with Closeable with Logg } override def toString: String = if (StringUtils.isNotBlank(getName)) getName else getId + + /** + * clear job memory + */ + def clear(): Unit = { + logger.info(s" clear job base info $getId") + this.executor = null + this.jobDaemon = null + } + } /** diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala index b0bbfd3c2b0..c18f18de127 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala @@ -40,7 +40,12 @@ class LoopArrayQueue(var group: Group) extends ConsumeQueue with Logging { override def 
getWaitingEvents: Array[SchedulerEvent] = { eventQueue synchronized { - toIndexedSeq.filter(x => x.getState.equals(SchedulerEventState.Inited)).toArray + toIndexedSeq + .filter(x => + x.getState.equals(SchedulerEventState.Inited) || x.getState + .equals(SchedulerEventState.Scheduled) + ) + .toArray } } @@ -104,7 +109,7 @@ class LoopArrayQueue(var group: Group) extends ConsumeQueue with Logging { max } - def waitingSize: Int = if (takeIndex <= realSize) size + override def waitingSize: Int = if (takeIndex <= realSize) size else { val length = size - takeIndex + realSize if (length < 0) 0 else length diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/PriorityLoopArrayQueue.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/PriorityLoopArrayQueue.scala new file mode 100644 index 00000000000..fd3fecc71b5 --- /dev/null +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/PriorityLoopArrayQueue.scala @@ -0,0 +1,270 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.scheduler.queue + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.scheduler.conf.SchedulerConfiguration + +import java.util +import java.util.Comparator +import java.util.concurrent.PriorityBlockingQueue +import java.util.concurrent.atomic.AtomicInteger +import java.util.concurrent.locks.ReentrantReadWriteLock + +/** + * 优先级队列元素 + * @param element + * 实际元素 + * @param priority + * 优先级 + * @param index + * 唯一索引 + */ +case class PriorityQueueElement(element: Any, priority: Int, index: Int) + +/** + * 固定大小集合,元素满后会移除最先插入集合的元素 + * @param maxSize + * 集合大小 + * @tparam K + * @tparam V + */ +class FixedSizeCollection[K, V](val maxSize: Int) extends util.LinkedHashMap[K, V] { + // 当集合大小超过最大值时,返回true,自动删除最老的元素 + protected override def removeEldestEntry(eldest: util.Map.Entry[K, V]): Boolean = size > maxSize +} + +/** + * 优先级队列,优先级相同时先进先出 + * @param group + */ +class PriorityLoopArrayQueue(var group: Group) extends ConsumeQueue with Logging { + + private val maxCapacity = group.getMaximumCapacity + + /** 优先级队列 */ + private val priorityEventQueue = new PriorityBlockingQueue[PriorityQueueElement]( + group.getMaximumCapacity, + new Comparator[PriorityQueueElement] { + + override def compare(o1: PriorityQueueElement, o2: PriorityQueueElement): Int = + if (o1.priority != o2.priority) o2.priority - o1.priority + else o1.index - o2.index + + } + ) + + /** 累加器 1.越先进队列值越小,优先级相同时控制先进先出 2.队列元素唯一索引,不会重复 */ + private val index = new AtomicInteger + + /** 记录队列中当前所有元素索引,元素存入优先级队列时添加,从优先级队列移除时删除 */ + private val indexMap = new util.HashMap[Int, SchedulerEvent]() + + /** 记录已经消费的元素,会有固定缓存大小,默认1000,元素从优先级队列移除时添加 */ + private val fixedSizeCollection = + new FixedSizeCollection[Integer, SchedulerEvent]( + SchedulerConfiguration.MAX_PRIORITY_QUEUE_CACHE_SIZE + ) + + private val rwLock = new ReentrantReadWriteLock + + protected[this] var realSize = size + override def isEmpty: Boolean = size <= 0 + override def isFull: Boolean = size 
>= maxCapacity + def size: Int = priorityEventQueue.size + + /** + * 将元素添加进队列 + * @param element + * @return + */ + private def addToPriorityQueue(element: PriorityQueueElement): Boolean = { + priorityEventQueue.offer(element) + rwLock.writeLock.lock + Utils.tryFinally(indexMap.put(element.index, element.element.asInstanceOf[SchedulerEvent]))( + rwLock.writeLock.unlock() + ) + true + } + + /** + * 从队列中获取并移除元素 + * @return + */ + private def getAndRemoveTop: SchedulerEvent = { + val top: PriorityQueueElement = priorityEventQueue.take() + rwLock.writeLock.lock + Utils.tryFinally { + indexMap.remove(top.index) + fixedSizeCollection.put(top.index, top.element.asInstanceOf[SchedulerEvent]) + }(rwLock.writeLock.unlock()) + top.element.asInstanceOf[SchedulerEvent] + } + + override def remove(event: SchedulerEvent): Unit = { + get(event).foreach(x => x.cancel()) + } + + override def getWaitingEvents: Array[SchedulerEvent] = { + toIndexedSeq + .filter(x => + x.getState.equals(SchedulerEventState.Inited) || x.getState + .equals(SchedulerEventState.Scheduled) + ) + .toArray + } + + override def clearAll(): Unit = priorityEventQueue synchronized { + realSize = 0 + index.set(0) + priorityEventQueue.clear() + fixedSizeCollection.clear() + indexMap.clear() + } + + override def get(event: SchedulerEvent): Option[SchedulerEvent] = { + val eventSeq = toIndexedSeq.filter(x => x.getId.equals(event.getId)).seq + if (eventSeq.size > 0) Some(eventSeq(0)) else None + } + + /** + * 根据索引获取队列元素 + * @param index + * @return + */ + override def get(index: Int): Option[SchedulerEvent] = { + if (!indexMap.containsKey(index) && !fixedSizeCollection.containsKey(index)) { + throw new IllegalArgumentException( + "The index " + index + " has already been deleted, now index must be better than " + index + ) + } + rwLock.readLock().lock() + Utils.tryFinally { + if (fixedSizeCollection.get(index) != null) Option(fixedSizeCollection.get(index)) + else Option(indexMap.get(index)) + 
}(rwLock.readLock().unlock()) + } + + override def getGroup: Group = group + + override def setGroup(group: Group): Unit = { + this.group = group + } + + def toIndexedSeq: IndexedSeq[SchedulerEvent] = if (size == 0) { + IndexedSeq.empty[SchedulerEvent] + } else { + priorityEventQueue + .toArray() + .map(_.asInstanceOf[PriorityQueueElement].element.asInstanceOf[SchedulerEvent]) + .toIndexedSeq + } + + def add(event: SchedulerEvent): Int = { + // 每次添加的时候需要给计数器+1,优先级相同时,控制先进先出 + event.setIndex(index.addAndGet(1)) + addToPriorityQueue(PriorityQueueElement(event, event.getPriority, event.getIndex)) + event.getIndex + } + + override def waitingSize: Int = size + + /** + * Add one, if the queue is full, it will block until the queue is + * available(添加一个,如果队列满了,将会一直阻塞,直到队列可用) + * + * @return + * Return index subscript(返回index下标) + */ + override def put(event: SchedulerEvent): Int = { + add(event) + } + + /** + * Add one, return None if the queue is full(添加一个,如果队列满了,返回None) + * + * @return + */ + override def offer(event: SchedulerEvent): Option[Int] = { + if (isFull) None else Some(add(event)) + } + + /** + * Get the latest SchedulerEvent of a group, if it does not exist, it will block + * [
(获取某个group最新的SchedulerEvent,如果不存在,就一直阻塞
) This method will move the pointer(该方法会移动指针) + * + * @return + */ + override def take(): SchedulerEvent = { + getAndRemoveTop + } + + /** + * Get the latest SchedulerEvent of a group, if it does not exist, block the maximum waiting + * time
(获取某个group最新的SchedulerEvent,如果不存在,就阻塞到最大等待时间
) This method will move the + * pointer(该方法会移动指针) + * @param mills + * Maximum waiting time(最大等待时间) + * @return + */ + override def take(mills: Long): Option[SchedulerEvent] = { + if (waitingSize == 0) { + Thread.sleep(mills) + } + if (waitingSize == 0) None else Option(getAndRemoveTop) + } + + /** + * Get the latest SchedulerEvent of a group and move the pointer to the next one. If not, return + * directly to None 获取某个group最新的SchedulerEvent,并移动指针到下一个。如果没有,直接返回None + * + * @return + */ + override def poll(): Option[SchedulerEvent] = { + if (waitingSize == 0) None + else Option(getAndRemoveTop) + } + + /** + * Only get the latest SchedulerEvent of a group, and do not move the pointer. If not, return + * directly to None 只获取某个group最新的SchedulerEvent,并不移动指针。如果没有,直接返回None + * + * @return + */ + override def peek(): Option[SchedulerEvent] = { + val ele: PriorityQueueElement = priorityEventQueue.peek() + if (ele == null) None else Option(ele.element.asInstanceOf[SchedulerEvent]) + } + + /** + * Get the latest SchedulerEvent whose group satisfies the condition and does not move the + * pointer. 
If not, return directly to None 获取某个group满足条件的最新的SchedulerEvent,并不移动指针。如果没有,直接返回None + * @param op + * 满足的条件 + * @return + */ + override def peek(op: (SchedulerEvent) => Boolean): Option[SchedulerEvent] = { + val ele: PriorityQueueElement = priorityEventQueue.peek() + if (ele == null) return None + val event: Option[SchedulerEvent] = Option( + priorityEventQueue.peek().element.asInstanceOf[SchedulerEvent] + ) + if (op(event.get)) event else None + } + +} diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEvent.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEvent.scala index 4f384d23840..3e87a069305 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEvent.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEvent.scala @@ -32,9 +32,13 @@ trait SchedulerEvent extends Logging { protected var scheduledTime: Long = 0L protected var startTime: Long = 0L protected var endTime: Long = 0L + protected var priority: Int = 100 + protected var index: Int = 0 def getEndTime: Long = endTime def getStartTime: Long = startTime + def getPriority: Int = priority + def getIndex: Int = index /* * To be compatible with old versions. 
@@ -50,6 +54,14 @@ trait SchedulerEvent extends Logging { this synchronized notify() } + def setPriority(priority: Int): Unit = { + this.priority = priority + } + + def setIndex(index: Int): Unit = { + this.index = index + } + def turnToScheduled(): Boolean = if (!isWaiting) { false } else { diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOConsumerManager.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOConsumerManager.scala index e95e172e066..02091e4f79e 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOConsumerManager.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOConsumerManager.scala @@ -19,10 +19,11 @@ package org.apache.linkis.scheduler.queue.fifoqueue import org.apache.linkis.common.utils.Utils import org.apache.linkis.scheduler.SchedulerContext +import org.apache.linkis.scheduler.conf.SchedulerConfiguration.FIFO_QUEUE_STRATEGY import org.apache.linkis.scheduler.errorcode.LinkisSchedulerErrorCodeSummary._ import org.apache.linkis.scheduler.exception.SchedulerErrorException import org.apache.linkis.scheduler.listener.ConsumerListener -import org.apache.linkis.scheduler.queue.{Consumer, ConsumerManager, Group, LoopArrayQueue} +import org.apache.linkis.scheduler.queue._ import java.text.MessageFormat import java.util.concurrent.{ExecutorService, ThreadPoolExecutor} @@ -34,7 +35,7 @@ class FIFOConsumerManager(groupName: String) extends ConsumerManager { private var group: Group = _ private var executorService: ThreadPoolExecutor = _ private var consumerListener: ConsumerListener = _ - private var consumerQueue: LoopArrayQueue = _ + private var consumerQueue: ConsumeQueue = _ private var consumer: Consumer = _ override def setSchedulerContext(schedulerContext: SchedulerContext): Unit = { diff --git 
a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala index 2a40c2517be..91340bf5c7a 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala @@ -21,6 +21,7 @@ import org.apache.linkis.common.exception.{ErrorException, WarnException} import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.Utils import org.apache.linkis.scheduler.SchedulerContext +import org.apache.linkis.scheduler.conf.SchedulerConfiguration import org.apache.linkis.scheduler.errorcode.LinkisSchedulerErrorCodeSummary._ import org.apache.linkis.scheduler.exception.SchedulerErrorException import org.apache.linkis.scheduler.executer.Executor @@ -73,6 +74,8 @@ class FIFOUserConsumer( override def getRunningEvents: Array[SchedulerEvent] = getEvents(e => e.isRunning || e.isWaitForRetry) + protected def getSchedulerContext: SchedulerContext = schedulerContext + private def getEvents(op: SchedulerEvent => Boolean): Array[SchedulerEvent] = { val result = ArrayBuffer[SchedulerEvent]() runningJobs.filter(_ != null).filter(x => op(x)).foreach(result += _) @@ -82,21 +85,34 @@ class FIFOUserConsumer( override def run(): Unit = { Thread.currentThread().setName(s"${toString}Thread") logger.info(s"$toString thread started!") - while (!terminate) { - Utils.tryAndError(loop()) - Utils.tryAndError(Thread.sleep(10)) + while (!terminate) Utils.tryAndError { + loop() + Thread.sleep(10) } logger.info(s"$toString thread stopped!") } protected def askExecutorGap(): Unit = {} + /** + * Task scheduling interception is used to judge the rules of task operation, and to judge other + * task rules based on Group. 
For example, Entrance makes Creator-level task judgment. + */ + protected def runScheduleIntercept(): Boolean = { + true + } + protected def loop(): Unit = { + if (!runScheduleIntercept()) { + Utils.tryQuietly(Thread.sleep(1000)) + return + } var isRetryJob = false def getWaitForRetryEvent: Option[SchedulerEvent] = { val waitForRetryJobs = runningJobs.filter(job => job != null && job.isJobCanRetry) waitForRetryJobs.find { job => isRetryJob = Utils.tryCatch(job.turnToRetry()) { t => + logger.info("Job state flipped to Scheduled failed in Retry(Retry时,job状态翻转为Scheduled失败)!") job.onFailure( "Job state flipped to Scheduled failed in Retry(Retry时,job状态翻转为Scheduled失败)!", t @@ -110,7 +126,7 @@ class FIFOUserConsumer( if (event.isEmpty) { val completedNums = runningJobs.filter(job => job == null || job.isCompleted) if (completedNums.length < 1) { - Utils.tryQuietly(Thread.sleep(1000)) // TODO 还可以优化,通过实现JobListener进行优化 + Utils.tryQuietly(Thread.sleep(1000)) return } while (event.isEmpty) { @@ -119,7 +135,12 @@ class FIFOUserConsumer( if ( takeEvent.exists(e => Utils.tryCatch(e.turnToScheduled()) { t => - takeEvent.get.asInstanceOf[Job].onFailure("Job状态翻转为Scheduled失败!", t) + takeEvent.get + .asInstanceOf[Job] + .onFailure( + "Failed to change the job status to Scheduled(Job状态翻转为Scheduled失败)", + t + ) false } ) @@ -128,7 +149,9 @@ class FIFOUserConsumer( } else getWaitForRetryEvent } } + event.foreach { case job: Job => + logger.info(s"event not empty ${job.getState} id: ${job.getId()}") Utils.tryCatch { val (totalDuration, askDuration) = (fifoGroup.getMaxAskExecutorDuration, fifoGroup.getAskExecutorInterval) @@ -157,6 +180,9 @@ class FIFOUserConsumer( totalDuration ) job.consumerFuture = null + logger.info( + s"FIFOUserConsumer ${getGroup.getGroupName} running size ${getRunningSize} waiting size ${getWaitingSize}" + ) executor.foreach { executor => job.setExecutor(executor) job.future = executeService.submit(job) @@ -174,7 +200,7 @@ class FIFOUserConsumer( ) ) case error: 
Throwable => - job.onFailure("请求引擎失败,可能是由于后台进程错误!请联系管理员", error) + job.onFailure("Failed to request EngineConn", error) if (job.isWaitForRetry) { logger.warn(s"Ask executor for Job $job failed, wait for the next retry!", error) if (!isRetryJob) putToRunningJobs(job) @@ -190,6 +216,22 @@ class FIFOUserConsumer( override def shutdown(): Unit = { future.cancel(true) + val waitEvents = queue.getWaitingEvents + if (waitEvents.nonEmpty) { + waitEvents.foreach { + case job: Job => + job.onFailure("Your job will be marked as canceled because the consumer be killed", null) + case _ => + } + } + // clear cache + queue.clearAll() + + this.runningJobs.foreach { job => + if (job != null && !job.isCompleted) { + job.onFailure("Your job will be marked as canceled because the consumer be killed", null) + } + } super.shutdown() } @@ -205,4 +247,14 @@ class FIFOUserConsumer( this.queue.peek.isEmpty && !this.runningJobs.exists(job => job != null && !job.isCompleted) } + override def getMaxRunningEvents: Int = this.maxRunningJobsNum + + override def getRunningSize: Int = { + runningJobs.count(job => job != null && !job.isCompleted) + } + + override def getWaitingSize: Int = { + queue.waitingSize + } + } diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala index 396b6fb3153..777adc89e3a 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala @@ -19,14 +19,24 @@ package org.apache.linkis.scheduler.queue.parallelqueue import org.apache.linkis.common.utils.{ByteTimeUtils, Logging, Utils} import org.apache.linkis.scheduler.conf.SchedulerConfiguration +import 
org.apache.linkis.scheduler.conf.SchedulerConfiguration.{ + FIFO_QUEUE_STRATEGY, + PFIFO_SCHEDULER_STRATEGY +} import org.apache.linkis.scheduler.listener.ConsumerListener import org.apache.linkis.scheduler.queue._ import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer +import org.apache.linkis.scheduler.util.SchedulerUtils.isSupportPriority import java.util.concurrent.{ExecutorService, TimeUnit} import scala.collection.mutable +/** + * @param maxParallelismUsers + * Consumer Thread pool size is:5 * maxParallelismUsers + 1 + * @param schedulerName + */ class ParallelConsumerManager(maxParallelismUsers: Int, schedulerName: String) extends ConsumerManager with Logging { @@ -106,7 +116,16 @@ class ParallelConsumerManager(maxParallelismUsers: Int, schedulerName: String) val newConsumer = createConsumer(groupName) val group = getSchedulerContext.getOrCreateGroupFactory.getGroup(groupName) newConsumer.setGroup(group) - newConsumer.setConsumeQueue(new LoopArrayQueue(group)) + // 需要判断人员是否是指定部门 + val consumerQueue: ConsumeQueue = + if ( + PFIFO_SCHEDULER_STRATEGY + .equalsIgnoreCase(FIFO_QUEUE_STRATEGY) && isSupportPriority(groupName) + ) { + logger.info(s"use priority queue: ${groupName}") + new PriorityLoopArrayQueue(group) + } else new LoopArrayQueue(group) + newConsumer.setConsumeQueue(consumerQueue) consumerListener.foreach(_.onConsumerCreated(newConsumer)) newConsumer.start() newConsumer @@ -126,8 +145,8 @@ class ParallelConsumerManager(maxParallelismUsers: Int, schedulerName: String) override def destroyConsumer(groupName: String): Unit = consumerGroupMap.get(groupName).foreach { tmpConsumer => - tmpConsumer.shutdown() - consumerGroupMap.remove(groupName) + Utils.tryAndWarn(tmpConsumer.shutdown()) + Utils.tryAndWarn(consumerGroupMap.remove(groupName)) consumerListener.foreach(_.onConsumerDestroyed(tmpConsumer)) logger.warn(s"Consumer of group ($groupName) in $schedulerName is destroyed.") } diff --git 
a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelSchedulerContextImpl.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelSchedulerContextImpl.scala index 99fa57bad4e..5b060a994f8 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelSchedulerContextImpl.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/parallelqueue/ParallelSchedulerContextImpl.scala @@ -25,8 +25,14 @@ class ParallelSchedulerContextImpl(override val maxParallelismUsers: Int) extends FIFOSchedulerContextImpl(maxParallelismUsers) with Logging { + /** + * Set the number of consumption groups supported The number of concurrency supported by each + * group is determined by + * org.apache.linkis.scheduler.queue.fifoqueue.FIFOGroupFactory#setDefaultMaxRunningJobs(int) + */ override protected def createGroupFactory(): GroupFactory = { val groupFactory = new ParallelGroupFactory + groupFactory.setParallelism(maxParallelismUsers) groupFactory } diff --git a/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/conf/SchedulerConfigurationTest.scala b/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/conf/SchedulerConfigurationTest.scala index 97172dab940..a3154ef0824 100644 --- a/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/conf/SchedulerConfigurationTest.scala +++ b/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/conf/SchedulerConfigurationTest.scala @@ -35,8 +35,6 @@ class SchedulerConfigurationTest { ) val fifoConsumerMaxIdleTime = CommonVars("wds.linkis.fifo.consumer.max.idle.time", new TimeType("1h")).getValue.toLong - assertEquals(SchedulerConfiguration.FIFO_CONSUMER_MAX_IDLE_TIME, fifoConsumerMaxIdleTime) - 
assertEquals(SchedulerConfiguration.FIFO_CONSUMER_IDLE_SCAN_INTERVAL.getValue.toLong, 7200000) val fifoConsumerIdleScanInitTime = CommonVars("wds.linkis.fifo.consumer.idle.scan.init.time", new TimeType("1s")).getValue.toLong assertEquals( diff --git a/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/queue/PriorityLoopArrayQueueTest.java b/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/queue/PriorityLoopArrayQueueTest.java new file mode 100644 index 00000000000..52eb3e8e8ed --- /dev/null +++ b/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/queue/PriorityLoopArrayQueueTest.java @@ -0,0 +1,206 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.scheduler.queue; + +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOGroup; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import scala.Option; + +import java.util.Random; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicInteger; + +class PriorityLoopArrayQueueTest { + AtomicInteger productCounter = new AtomicInteger(); + AtomicInteger consumerCounter = new AtomicInteger(); + Random rand = new Random(); + + @Test + public void testConcurrentPutAndTake() throws Exception { + AtomicInteger counter = new AtomicInteger(); + FIFOGroup group = new FIFOGroup("test", 5000, 5000); + PriorityLoopArrayQueue queue = new PriorityLoopArrayQueue(group); + + // 获取开始时间的毫秒数 + long startTime = System.currentTimeMillis(); + // 三分钟的毫秒数 + long threeMinutesInMillis = 30 * 1000L; + int genLen = 5; + int getLen = 7; + final CountDownLatch latch = new CountDownLatch(genLen + getLen + 1); + // 5 个生产者 + for (int i = 0; i < genLen; i++) { + final int id = i; + new Thread(() -> { + try{ + Thread.sleep(100L * id); + latch.countDown(); + latch.await(); + } catch (InterruptedException e){ + e.printStackTrace(); + } + System.out.println(Thread.currentThread().getName() + "开始生产:"); + while ((System.currentTimeMillis() - startTime) < threeMinutesInMillis) { + //生产 + try { + Thread.sleep(getRandom(200)); + product(counter, queue); + product(counter, queue); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + //消费 + //consume(queue); + } + System.out.println(Thread.currentThread().getName() + "结束生产:"); + }, "生产t-" + i).start(); + } + // 5 个消费者 + for (int i = 0; i < getLen; i++) { + final int id = i; + new Thread(() -> { + try{ + Thread.sleep(getRandom(200)); + latch.countDown(); + latch.await(); + } catch (InterruptedException e){ + e.printStackTrace(); + } + System.out.println(Thread.currentThread().getName() + "开始消费:"); + while (true) { + try { + 
Thread.sleep(getRandom(200)); + //消费 + consume(queue); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + }, "消费t-" + i).start(); + } + new Thread(() -> { + try { + Thread.sleep(100); + latch.countDown(); + latch.await(); + } catch (InterruptedException e) { + e.printStackTrace(); + } + System.out.println(Thread.currentThread().getName() + "开始获取当前队列元素:"); + while ((System.currentTimeMillis() - startTime) < threeMinutesInMillis * 2) { + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + System.out.println("生产大小:" + productCounter.get()); + System.out.println("消费大小:" + consumerCounter.get()); + System.out.println("队列当前大小:" + queue.size()); + // 需要 去掉私有测试 + //System.out.println("index size: " + queue.indexMap().size()); + //System.out.println("cache size: " + queue.fixedSizeCollection().size()); + } + }).start(); + Thread.sleep(threeMinutesInMillis * 2); + System.out.println("product:" + productCounter.get() + ", consumer: " + consumerCounter.get()); + // 需要 去掉私有测试 + //Assertions.assertEquals(1000, queue.fixedSizeCollection().size()); + Assertions.assertEquals(productCounter.get(), consumerCounter.get()); + } + + //消费 + private void consume(PriorityLoopArrayQueue queue) { + SchedulerEvent take = null; + try { + take = queue.take(); + consumerCounter.addAndGet(1); + } catch (Exception e) { + throw new RuntimeException(e); + } + printEvent("消费" , take); + } + + //生产 + private void product(AtomicInteger counter, PriorityLoopArrayQueue queue) { + int i1 = counter.addAndGet(1); + //1000-重要,100-普通,10-不重要 + int[] proArr = {1000, 100, 10}; + int priority = getRandom(3); + String name = "item-" + i1 + "-" + priority; + System.out.println("生产:" + name); + Option offer = queue.offer(getJob(name, proArr[priority])); + if (offer.nonEmpty()) { + productCounter.addAndGet(1); + Option schedulerEventOption = queue.get((int) offer.get()); + printEvent("get:", schedulerEventOption.get()); + } else { + 
System.out.println("当前队列已满,大小:" + queue.size()); + } + } + @Test + void testFinally() { + + } + @Test + void enqueue() { + // 压测 offer take get + FIFOGroup group = new FIFOGroup("test", 100, 100); + PriorityLoopArrayQueue queue = new PriorityLoopArrayQueue(group); + Option idx = queue.offer(getJob("job1-1", 1)); + //插入测试 + Assertions.assertEquals(1, (int)idx.get()); + queue.offer(getJob("job2", 2)); + queue.offer(getJob("job3", 3)); + queue.offer(getJob("job1-2", 1)); + queue.offer(getJob("job5", 5)); + queue.offer(getJob("item1-3", 1)); + queue.offer(getJob("item6-1", 6)); + queue.offer(getJob("item4", 4)); + queue.offer(getJob("item6-2", 6)); + //peek 测试 + Option peek = queue.peek(); + Assertions.assertEquals("item6-1", peek.get().getId()); + while (queue.size() > 1) { + queue.take(); + } + SchedulerEvent event = queue.take(); + //优先级,以及先进先出测试 + Assertions.assertEquals("item1-3", event.getId()); + Assertions.assertEquals(1, event.priority()); + Assertions.assertEquals(6, event.getIndex()); + //缓存测试,需要设置 linkis.fifo.priority.queue.max.cache.size 为 5 +// Assertions.assertThrows(IllegalArgumentException.class, () -> {queue.get(7);}); + + } + + private void printEvent(String opt, SchedulerEvent event) { + System.out.println("【" + Thread.currentThread().getName() + "】" + opt + ":" + event.getId() + ", priority: " + event.getPriority() + ", index: " + event.getIndex()); + } + private int getRandom(int bound){ + int res = rand.nextInt(bound); + return res; + } + private UserJob getJob(String name, int priority) { + UserJob job = new UserJob(); + job.setId(name); + job.setPriority(priority); + return job; + } +} \ No newline at end of file diff --git a/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/queue/Test.scala b/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/queue/Test.scala new file mode 100644 index 00000000000..e8340994007 --- /dev/null +++ 
b/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/queue/Test.scala @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.scheduler.queue + +import java.util +import java.util.{PriorityQueue, Queue} + +case class PriorityFIFOQueue() { + case class QueueItem(item: Queue[String], priority: Int) + + import java.util.Comparator + + val cNode: Comparator[QueueItem] = new Comparator[QueueItem]() { + override def compare(o1: QueueItem, o2: QueueItem): Int = o2.priority - o1.priority + } + + private val queue = new PriorityQueue[QueueItem](cNode) + private var _size = 0 + private var _count: Long = 0L + + def size: Int = _size + + def isEmpty: Boolean = _size == 0 + + def enqueue(item: String, priority: Int): Unit = { + val deque = new util.ArrayDeque[String]() + deque.add(item) + queue.add(QueueItem(deque, priority)) + } + +} diff --git a/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/util/TestSchedulerUtils.scala b/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/util/TestSchedulerUtils.scala new file mode 100644 index 00000000000..48d7a689221 --- /dev/null +++ 
b/linkis-commons/linkis-scheduler/src/test/scala/org/apache/linkis/scheduler/util/TestSchedulerUtils.scala @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.scheduler.util + +import org.apache.linkis.scheduler.util.SchedulerUtils.{ + getCreatorFromGroupName, + getEngineTypeFromGroupName, + getUserFromGroupName, + isSupportPriority +} + +import org.junit.jupiter.api.{Assertions, Test} + +class TestSchedulerUtils { + + @Test + def testIsSupportPriority: Unit = { + // set linkis.fifo.queue.support.priority.users=hadoop + // set linkis.fifo.queue.support.priority.creators=IDE or ALL_CREATORS + val bool: Boolean = isSupportPriority("IdE_haDoop_hive") + Assertions.assertEquals(false, bool) + } + + @Test + def testShellDangerCode: Unit = { + var groupName = "IDE_hadoop_hive" + var username: String = getUserFromGroupName(groupName) + var engineType: String = getEngineTypeFromGroupName(groupName) + var creator: String = getCreatorFromGroupName(groupName) + Assertions.assertEquals("hadoop", username) + Assertions.assertEquals("hive", engineType) + Assertions.assertEquals("IDE", creator) + groupName = "APP_TEST_v_hadoop_hive" + username = getUserFromGroupName(groupName) + engineType 
= getEngineTypeFromGroupName(groupName) + creator = getCreatorFromGroupName(groupName) + Assertions.assertEquals("v_hadoop", username) + Assertions.assertEquals("hive", engineType) + Assertions.assertEquals("APP_TEST", creator) + + groupName = "TEST_v_hadoop_hive" + username = getUserFromGroupName(groupName) + engineType = getEngineTypeFromGroupName(groupName) + creator = getCreatorFromGroupName(groupName) + Assertions.assertEquals("v_hadoop", username) + Assertions.assertEquals("hive", engineType) + Assertions.assertEquals("TEST", creator) + + groupName = "APP_TEST_hadoop_hive" + username = getUserFromGroupName(groupName) + engineType = getEngineTypeFromGroupName(groupName) + creator = getCreatorFromGroupName(groupName) + Assertions.assertEquals("hadoop", username) + Assertions.assertEquals("hive", engineType) + Assertions.assertEquals("APP_TEST", creator) + } + +} diff --git a/linkis-commons/linkis-storage/pom.xml b/linkis-commons/linkis-storage/pom.xml index 158828fc297..9ae2ae4ee0e 100644 --- a/linkis-commons/linkis-storage/pom.xml +++ b/linkis-commons/linkis-storage/pom.xml @@ -57,19 +57,6 @@ ${protobuf.version} - - org.json4s - json4s-jackson_${scala.binary.version} - ${json4s.version} - provided - - - org.scala-lang - scala-library - - - - org.springframework spring-core diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java index 9ca34258375..7187f72ce57 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java @@ -37,6 +37,14 @@ public enum LinkisStorageErrorCodeSummary implements LinkisErrorCode { 52004, "You must register IOMethodInterceptorCreator before you can use proxy 
mode.(必须先注册IOMethodInterceptorCreator,才能使用代理模式)"), UNSUPPORTED_OPEN_FILE_TYPE(54001, "Unsupported open file type(不支持打开的文件类型)"), + + RESULT_COL_LENGTH(52003, "Col value length {0} exceed limit {1}"), + + RESULT_COLUMN_INDEX_OUT_OF_BOUNDS(52004, "Column index value {0} exceed limit {1}"), + + RESULT_ROW_LENGTH(520034, "Row value length {0} exceed limit {1}"), + + RESULT_COL_SIZE(520035, "Col size length {0} exceed limit {1}"), INVALID_CUSTOM_PARAMETER(65000, "Invalid custom parameter(不合法的自定义参数)"); /** 错误码 */ diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelStorageReader.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelStorageReader.java index 2e3ca6e085a..910c3d78171 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelStorageReader.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelStorageReader.java @@ -37,7 +37,7 @@ public static List> getExcelTitle( } else { res = XlsxUtils.getBasicInfo(in, file); } - if (res == null && res.size() < 2) { + if (res == null || res.size() < 2) { throw new Exception("There is a problem with the file format(文件格式有问题)"); } List headerType = new ArrayList<>(); diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java index f3b8f448655..39d89c3d963 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java @@ -21,12 +21,17 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.poi.hssf.usermodel.HSSFWorkbook; +import org.apache.poi.openxml4j.util.ZipSecureFile; +import org.apache.poi.ss.usermodel.Cell; +import org.apache.poi.ss.usermodel.Row; +import 
org.apache.poi.ss.usermodel.Sheet; +import org.apache.poi.ss.usermodel.Workbook; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; +import java.util.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -61,6 +66,7 @@ public static String excelToCsv( throws Exception { String hdfsPath = "/tmp/" + StorageUtils.getJvmUser() + "/" + System.currentTimeMillis() + ".csv"; + LOG.info("The excel to csv with hdfs path:" + hdfsPath); ExcelXlsReader xlsReader = new ExcelXlsReader(); RowToCsvDeal rowToCsvDeal = new RowToCsvDeal(); OutputStream out = null; @@ -80,4 +86,44 @@ public static String excelToCsv( } return hdfsPath; } + + public static Map>> getSheetsInfo( + InputStream inputStream, Boolean hasHeader) { + // use xls file + Workbook workbook = null; + try { + // 压缩膨胀比率,处理excel行或者列过多的情况,不能设置再小了,会导致内存过大 + ZipSecureFile.setMinInflateRatio(0.005); + workbook = new HSSFWorkbook(inputStream); + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + // 使用完最后需要还原 + ZipSecureFile.setMinInflateRatio(0.01); + } + Map>> res = new LinkedHashMap<>(workbook.getNumberOfSheets()); + // foreach Sheet + for (int i = 0; i < workbook.getNumberOfSheets(); i++) { + Sheet sheet = workbook.getSheetAt(i); + + List> rowList = new ArrayList<>(); + + // get first row as column name + Row headerRow = sheet.getRow(0); + + // foreach column + for (int j = 0; j < headerRow.getPhysicalNumberOfCells(); j++) { + Map sheetMap = new LinkedHashMap<>(); + Cell cell = headerRow.getCell(j); + if (hasHeader) { + sheetMap.put(cell.getStringCellValue(), "string"); + } else { + sheetMap.put("col_" + (j + 1), "string"); + } + rowList.add(sheetMap); + } + res.put(sheet.getSheetName(), rowList); + } + return res; + } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java 
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java index 7cbf579a9d1..c2418f4c33c 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java @@ -23,10 +23,9 @@ import org.apache.poi.ss.usermodel.Workbook; import java.io.File; +import java.io.IOException; import java.io.InputStream; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; +import java.util.*; import com.github.pjfanning.xlsx.StreamingReader; @@ -79,4 +78,54 @@ public static List> getBasicInfo(InputStream inputStream, File file } } } + + public static Map>> getAllSheetInfo( + InputStream inputStream, File file, Boolean hasHeader) throws IOException { + try { + Workbook wb = null; + if (inputStream != null) { + wb = + StreamingReader.builder() + // number of rows to keep in memory (defaults to 10) + .rowCacheSize(2) + .open(inputStream); + } else { + wb = + StreamingReader.builder() + // number of rows to keep in memory (defaults to 10) + .rowCacheSize(2) + .open(file); + } + Map>> res = new LinkedHashMap<>(wb.getNumberOfSheets()); + for (Sheet sheet : wb) { + Iterator iterator = sheet.iterator(); + Row row = null; + while (iterator.hasNext() && row == null) { + row = iterator.next(); + } + List> rowList = new ArrayList<>(); + if (row == null) { + res.put(sheet.getSheetName(), rowList); + continue; + } + int cellIdx = 0; + for (Cell cell : row) { + Map item = new LinkedHashMap<>(); + if (hasHeader) { + item.put(cell.getStringCellValue(), "string"); + } else { + item.put("col_" + (cellIdx + 1), "string"); + } + cellIdx++; + rowList.add(item); + } + res.put(sheet.getSheetName(), rowList); + } + return res; + } finally { + if (inputStream != null) { + inputStream.close(); + } + } + } } diff --git 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java new file mode 100644 index 00000000000..955a8e1d60d --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.exception; + +public class ColLengthExceedException extends StorageWarnException { + + public ColLengthExceedException(int errCode, String desc) { + super(errCode, desc); + } + + public ColLengthExceedException( + int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } + + public ColLengthExceedException(int errCode, String desc, Throwable t) { + super(errCode, desc, t); + } +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java new file mode 100644 index 00000000000..969b19d20b8 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.exception; + +public class ColumnIndexExceedException extends StorageWarnException { + + public ColumnIndexExceedException(int errCode, String desc) { + super(errCode, desc); + } + + public ColumnIndexExceedException( + int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } + + public ColumnIndexExceedException(int errCode, String desc, Throwable t) { + super(errCode, desc, t); + } +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java index 3c82ceb5233..ae66c1cf997 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java @@ -20,7 +20,14 @@ public enum StorageErrorCode { /** */ - FS_NOT_INIT(53001, "please init first(请先初始化)"); + FS_NOT_INIT(53001, "please init first"), + INCONSISTENT_DATA(53001, "Inconsistent row data read,read %s,need rowLen %s"), + FS_OOM(53002, "OOM occurred while reading the file"), + FS_ERROR(53003, "Failed to operation fs"), + + READ_PARQUET_FAILED(53004, "Failed to read parquet file"), + + READ_ORC_FAILED(53005, "Failed to read orc file"); StorageErrorCode(int errorCode, String message) { this.code = errorCode; diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageReadException.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageReadException.java new file mode 100644 index 00000000000..dedad1140c5 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageReadException.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.exception; + +import org.apache.linkis.common.exception.ErrorException; + +public class StorageReadException extends ErrorException { + + public StorageReadException(int errCode, String desc) { + super(errCode, desc); + } + + public StorageReadException(int errCode, String desc, Throwable t) { + super(errCode, desc); + initCause(t); + } + + public StorageReadException(int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java index b949c097545..9f53a6249b2 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java @@ -63,6 +63,6 @@ public Fs getFs(String user, String proxyUser, String label) { @Override public String fsName() { - return "hdfs"; + return StorageUtils.HDFS(); } } diff --git 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java index 993de3969ff..ef88cec36d8 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java @@ -64,6 +64,6 @@ private FileSystem getProxyFs() { @Override public String fsName() { - return "file"; + return StorageUtils.FILE(); } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java index c19213a9cce..ac505cb7b26 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java @@ -23,9 +23,15 @@ import java.io.File; import java.io.IOException; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class FileSystem implements Fs { + private static final Logger LOG = LoggerFactory.getLogger(FileSystem.class); + protected String user; private String defaultFilePerm = "rwxr-----"; // 740 private String defaultFolderPerm = "rwxr-x---"; // 750 @@ -46,6 +52,16 @@ public String getDefaultFolderPerm() { public abstract long getUsableSpace(FsPath dest) throws IOException; + public abstract long getLength(FsPath dest) throws IOException; + + public abstract String getChecksumWithMD5(FsPath dest) throws IOException; + + public abstract String getChecksum(FsPath dest) throws IOException; + + public abstract long getBlockSize(FsPath dest) throws IOException; + + public abstract List getAllFilePaths(FsPath dest) throws IOException; + public abstract boolean canExecute(FsPath 
dest) throws IOException; public abstract boolean setOwner(FsPath dest, String user, String group) throws IOException; @@ -94,6 +110,7 @@ protected FsPath getParentPath(String path) { } else { parentPath = path.substring(0, path.lastIndexOf("/")); } + LOG.info("Get parent path:" + parentPath); return new FsPath(parentPath); } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java index 7b52d8709f4..cda7f97ac2e 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java @@ -17,6 +17,7 @@ package org.apache.linkis.storage.fs.impl; +import org.apache.linkis.common.conf.Configuration; import org.apache.linkis.common.io.FsPath; import org.apache.linkis.hadoop.common.conf.HadoopConf; import org.apache.linkis.hadoop.common.utils.HDFSUtils; @@ -30,10 +31,7 @@ import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.security.UserGroupInformation; @@ -44,6 +42,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -53,12 +52,19 @@ public class HDFSFileSystem extends FileSystem { public static final String HDFS_PREFIX_WITHOUT_AUTH = "hdfs:///"; public static final String HDFS_PREFIX_WITH_AUTH = "hdfs://"; private 
org.apache.hadoop.fs.FileSystem fs = null; - private Configuration conf = null; + private org.apache.hadoop.conf.Configuration conf = null; private String label = null; private static final Logger logger = LoggerFactory.getLogger(HDFSFileSystem.class); + private static final String LOCKER_SUFFIX = "refresh"; + + private static final int REFRESH_INTERVAL = + LinkisStorageConf.HDFS_FILE_SYSTEM_REFRESHE_INTERVAL() * 1000 * 60; + + private static final ConcurrentHashMap lastCallTimes = new ConcurrentHashMap<>(); + /** File System abstract method start */ @Override public String listRoot() throws IOException { @@ -82,7 +88,7 @@ public long getUsableSpace(FsPath dest) throws IOException { @Override public boolean canExecute(FsPath dest) throws IOException { - return canAccess(dest, FsAction.EXECUTE); + return canAccess(dest, FsAction.EXECUTE, this.user); } @Override @@ -177,26 +183,33 @@ public void init(Map properties) throws IOException { if (MapUtils.isNotEmpty(properties) && properties.containsKey(StorageConfiguration.PROXY_USER().key())) { user = StorageConfiguration.PROXY_USER().getValue(properties); + properties.remove(StorageConfiguration.PROXY_USER().key()); } if (user == null) { throw new IOException("User cannot be empty(用户不能为空)"); } - - conf = HDFSUtils.getConfigurationByLabel(user, label); - + if (label == null && Configuration.IS_MULTIPLE_YARN_CLUSTER()) { + label = StorageConfiguration.LINKIS_STORAGE_FS_LABEL().getValue(); + } + /** if properties is null do not to create conf */ if (MapUtils.isNotEmpty(properties)) { - for (String key : properties.keySet()) { - String v = properties.get(key); - if (StringUtils.isNotEmpty(v)) { - conf.set(key, v); + conf = HDFSUtils.getConfigurationByLabel(user, label); + if (MapUtils.isNotEmpty(properties)) { + for (String key : properties.keySet()) { + String v = properties.get(key); + if (StringUtils.isNotEmpty(v)) { + conf.set(key, v); + } } } } - if (StorageConfiguration.FS_CACHE_DISABLE().getValue()) { - 
conf.set("fs.hdfs.impl.disable.cache", "true"); + if (null != conf) { + fs = HDFSUtils.getHDFSUserFileSystem(user, label, conf); + } else { + fs = HDFSUtils.getHDFSUserFileSystem(user, label); } - fs = HDFSUtils.getHDFSUserFileSystem(user, conf); + if (fs == null) { throw new IOException("init HDFS FileSystem failed!"); } @@ -246,7 +259,6 @@ public OutputStream write(FsPath dest, boolean overwrite) throws IOException { return fs.append(new Path(path)); } else { OutputStream out = fs.create(new Path(path), true); - this.setPermission(dest, this.getDefaultFilePerm()); return out; } } @@ -291,12 +303,16 @@ public List list(FsPath path) throws IOException { @Override public boolean canRead(FsPath dest) throws IOException { - return canAccess(dest, FsAction.READ); + return canAccess(dest, FsAction.READ, this.user); + } + + public boolean canRead(FsPath dest, String user) throws IOException { + return canAccess(dest, FsAction.READ, user); } @Override public boolean canWrite(FsPath dest) throws IOException { - return canAccess(dest, FsAction.WRITE); + return canAccess(dest, FsAction.WRITE, this.user); } @Override @@ -309,7 +325,7 @@ public boolean exists(FsPath dest) throws IOException { if ((message != null && message.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS())) || (rootCauseMessage != null && rootCauseMessage.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS()))) { - logger.info("Failed to execute exists, retry", e); + logger.info("Failed to execute exists for user {}, retry", user, e); resetRootHdfs(); return fs.exists(new Path(checkHDFSPath(dest.getPath()))); } else { @@ -320,15 +336,31 @@ public boolean exists(FsPath dest) throws IOException { private void resetRootHdfs() { if (fs != null) { - synchronized (this) { + String locker = user + LOCKER_SUFFIX; + synchronized (locker.intern()) { // NOSONAR if (fs != null) { if (HadoopConf.HDFS_ENABLE_CACHE()) { - HDFSUtils.closeHDFSFIleSystem(fs, user, true); + long currentTime = System.currentTimeMillis(); + 
Long lastCallTime = lastCallTimes.get(user); + + if (lastCallTime != null && (currentTime - lastCallTime) < REFRESH_INTERVAL) { + logger.warn( + "Method call denied for username: {} Please wait for {} minutes.", + user, + REFRESH_INTERVAL / 60000); + return; + } + lastCallTimes.put(user, currentTime); + HDFSUtils.closeHDFSFIleSystem(fs, user, label, true); } else { - HDFSUtils.closeHDFSFIleSystem(fs, user); + HDFSUtils.closeHDFSFIleSystem(fs, user, label); + } + logger.warn("{} FS reset close.", user); + if (null != conf) { + fs = HDFSUtils.getHDFSUserFileSystem(user, label, conf); + } else { + fs = HDFSUtils.getHDFSUserFileSystem(user, label); } - logger.warn(user + "FS reset close."); - fs = HDFSUtils.getHDFSUserFileSystem(user, conf); } } } @@ -381,7 +413,7 @@ private FsPath fillStorageFile(FsPath fsPath, FileStatus fileStatus) throws IOEx return fsPath; } - private boolean canAccess(FsPath fsPath, FsAction access) throws IOException { + private boolean canAccess(FsPath fsPath, FsAction access, String user) throws IOException { String path = checkHDFSPath(fsPath.getPath()); if (!exists(fsPath)) { throw new IOException("directory or file not exists: " + path); @@ -464,4 +496,55 @@ private String checkHDFSPath(String path) { } return path; } + + @Override + public long getLength(FsPath dest) throws IOException { + FileStatus fileStatus = fs.getFileStatus(new Path(checkHDFSPath(dest.getPath()))); + return fileStatus.getLen(); + } + + @Override + public String getChecksumWithMD5(FsPath dest) throws IOException { + String path = checkHDFSPath(dest.getPath()); + if (!exists(dest)) { + throw new IOException("directory or file not exists: " + path); + } + MD5MD5CRC32FileChecksum fileChecksum = + (MD5MD5CRC32FileChecksum) fs.getFileChecksum(new Path(path)); + return fileChecksum.toString().split(":")[1]; + } + + @Override + public String getChecksum(FsPath dest) throws IOException { + String path = checkHDFSPath(dest.getPath()); + if (!exists(dest)) { + throw new 
IOException("directory or file not exists: " + path); + } + FileChecksum fileChecksum = fs.getFileChecksum(new Path(path)); + return fileChecksum.toString(); + } + + @Override + public long getBlockSize(FsPath dest) throws IOException { + String path = checkHDFSPath(dest.getPath()); + if (!exists(dest)) { + throw new IOException("directory or file not exists: " + path); + } + return fs.getBlockSize(new Path(path)); + } + + @Override + public List getAllFilePaths(FsPath path) throws IOException { + FileStatus[] stat = fs.listStatus(new Path(checkHDFSPath(path.getPath()))); + List fsPaths = new ArrayList<>(); + for (FileStatus f : stat) { + FsPath fsPath = fillStorageFile(new FsPath(f.getPath().toUri().getPath()), f); + if (fs.isDirectory(f.getPath())) { + fsPaths.addAll(getAllFilePaths(fsPath)); + } else { + fsPaths.add(fsPath); + } + } + return fsPaths; + } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java index ce2ee43b7e7..9c7a5961fbb 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java @@ -47,12 +47,7 @@ import java.nio.file.attribute.PosixFilePermissions; import java.nio.file.attribute.UserPrincipal; import java.nio.file.attribute.UserPrincipalLookupService; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.Stack; +import java.util.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -75,12 +70,14 @@ public String listRoot() throws IOException { @Override public long getTotalSpace(FsPath dest) throws IOException { String path = dest.getPath(); + LOG.info("Get total space with path:" + path); return new File(path).getTotalSpace(); } 
@Override public long getFreeSpace(FsPath dest) throws IOException { String path = dest.getPath(); + LOG.info("Get free space with path:" + path); return new File(path).getFreeSpace(); } @@ -117,6 +114,7 @@ public boolean setOwner(FsPath dest, String user, String group) throws IOExcepti @Override public boolean setOwner(FsPath dest, String user) throws IOException { + LOG.info("Set owner with path:" + dest.getPath() + "and user:" + user); if (!StorageUtils.isIOProxy()) { LOG.info("io not proxy, setOwner skip"); return true; @@ -133,6 +131,7 @@ public boolean setOwner(FsPath dest, String user) throws IOException { @Override public boolean setGroup(FsPath dest, String group) throws IOException { + LOG.info("Set group with path:" + dest.getPath() + " and group:" + group); if (!StorageUtils.isIOProxy()) { LOG.info("io not proxy, setGroup skip"); return true; @@ -155,6 +154,7 @@ public boolean mkdir(FsPath dest) throws IOException { @Override public boolean mkdirs(FsPath dest) throws IOException { String path = dest.getPath(); + LOG.info("Try to mkdirs with path:" + path); File file = new File(path); // Create parent directories one by one and set their permissions to rwxrwxrwx. 
Stack dirsToMake = new Stack(); @@ -182,6 +182,7 @@ public boolean mkdirs(FsPath dest) throws IOException { } public boolean canMkdir(FsPath destParentDir) throws IOException { + LOG.info("Try to check if the directory can be created with path:" + destParentDir.getPath()); if (!StorageUtils.isIOProxy()) { LOG.debug("io not proxy, not check owner, just check if have write permission "); return this.canWrite(destParentDir); @@ -203,6 +204,7 @@ public boolean canMkdir(FsPath destParentDir) throws IOException { @Override public boolean copy(String origin, String dest) throws IOException { File file = new File(dest); + LOG.info("Try to copy file from:" + origin + " to dest:" + dest); if (!isOwner(file.getParent())) { throw new IOException("you have on permission to create file " + dest); } @@ -213,7 +215,9 @@ public boolean copy(String origin, String dest) throws IOException { setOwner(new FsPath(dest), user, null); } } catch (Throwable e) { - file.delete(); + if (!file.delete()) { + throw new IOException("File delete failed!"); + } if (e instanceof IOException) { throw (IOException) e; } else { @@ -225,6 +229,7 @@ public boolean copy(String origin, String dest) throws IOException { @Override public boolean setPermission(FsPath dest, String permission) throws IOException { + LOG.info("Try to set permission dest with path:" + dest.getPath()); if (!StorageUtils.isIOProxy()) { LOG.info("io not proxy, setPermission as parent."); try { @@ -251,11 +256,20 @@ public boolean setPermission(FsPath dest, String permission) throws IOException public FsPathListWithError listPathWithError(FsPath path) throws IOException { File file = new File(path.getPath()); File[] files = file.listFiles(); + LOG.info("Try to list path:" + path.getPath() + " with error msg"); if (files != null) { List rtn = new ArrayList(); + Set fileNameSet = new HashSet<>(); + fileNameSet.add(path.getPath().trim()); String message = ""; for (File f : files) { try { + if (fileNameSet.contains(f.getPath())) { + 
LOG.info("File {} is duplicate", f.getPath()); + continue; + } else { + fileNameSet.add(f.getParent().trim()); + } rtn.add(get(f.getPath())); } catch (Throwable e) { LOG.warn("Failed to list path:", e); @@ -294,6 +308,7 @@ public void init(Map properties) throws IOException { String groupInfo; try { groupInfo = Utils.exec(new String[] {"id", user}); + LOG.info("Get groupinfo:" + groupInfo + " with shell command: id " + user); } catch (RuntimeException e) { group = user; return; @@ -322,7 +337,7 @@ public FsPath get(String dest) throws IOException { } else { fsPath = new FsPath(dest); } - + LOG.info("Try to get FsPath with path:" + fsPath.getPath()); PosixFileAttributes attr = null; try { attr = Files.readAttributes(Paths.get(fsPath.getPath()), PosixFileAttributes.class); @@ -365,19 +380,23 @@ public OutputStream write(FsPath dest, boolean overwrite) throws IOException { @Override public boolean create(String dest) throws IOException { - + LOG.info("try to create file with path:" + dest); File file = new File(dest); if (!isOwner(file.getParent())) { throw new IOException("you have on permission to create file " + dest); } - file.createNewFile(); + if (!file.createNewFile()) { + throw new IOException("create new file error! 
path:" + dest); + } try { setPermission(new FsPath(dest), this.getDefaultFilePerm()); if (!user.equals(getOwner(dest))) { setOwner(new FsPath(dest), user, null); } } catch (Throwable e) { - file.delete(); + if (!file.delete()) { + throw new IOException("delete file error!"); + } if (e instanceof IOException) { throw (IOException) e; } else { @@ -391,6 +410,7 @@ public boolean create(String dest) throws IOException { public List list(FsPath path) throws IOException { File file = new File(path.getPath()); File[] files = file.listFiles(); + LOG.info("Try to get file list with path:" + path.getPath()); if (files != null) { List rtn = new ArrayList(); for (File f : files) { @@ -411,6 +431,11 @@ public boolean canRead(FsPath dest) throws IOException { PosixFilePermission.OTHERS_READ); } + @Override + public boolean canRead(FsPath dest, String user) throws IOException { + return false; + } + @Override public boolean canWrite(FsPath dest) throws IOException { return can( @@ -478,4 +503,31 @@ private String getOwner(String path) throws IOException { PosixFileAttributes attr = Files.readAttributes(Paths.get(path), PosixFileAttributes.class); return attr.owner().getName(); } + + @Override + public long getLength(FsPath dest) throws IOException { + String path = dest.getPath(); + LOG.info("Get file length with path:" + path); + return new File(path).length(); + } + + @Override + public String getChecksum(FsPath dest) { + return null; + } + + @Override + public String getChecksumWithMD5(FsPath dest) { + return null; + } + + @Override + public long getBlockSize(FsPath dest) { + return 0L; + } + + @Override + public List getAllFilePaths(FsPath dest) { + return new ArrayList<>(); + } } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala index bf03c787547..50c60fecd2e 100644 --- 
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala @@ -25,21 +25,30 @@ import org.apache.commons.lang3.StringUtils object LinkisStorageConf { private val CONF_LOCK = new Object() + val enableLimitThreadLocal: ThreadLocal[String] = new ThreadLocal[String] + + val columnIndicesThreadLocal: ThreadLocal[Array[Int]] = new ThreadLocal[Array[Int]] + val HDFS_FILE_SYSTEM_REST_ERRS: String = CommonVars .apply( "wds.linkis.hdfs.rest.errs", - ".*Filesystem closed.*|.*Failed to find any Kerberos tgt.*" + ".*Filesystem closed.*|.*Failed to find any Kerberos tgt.*|.*The client is stopped.*" ) .getValue + val HDFS_FILE_SYSTEM_REFRESHE_INTERVAL: Int = + CommonVars + .apply("wds.linkis.hdfs.rest.interval", 10) + .getValue + val ROW_BYTE_MAX_LEN_STR = CommonVars("wds.linkis.resultset.row.max.str", "2m").getValue val ROW_BYTE_MAX_LEN = ByteTimeUtils.byteStringAsBytes(ROW_BYTE_MAX_LEN_STR) val FILE_TYPE = CommonVars( "wds.linkis.storage.file.type", - "dolphin,sql,scala,py,hql,python,out,log,text,sh,jdbc,ngql,psql,fql,tsql" + "dolphin,sql,scala,py,py3,hql,python,out,log,text,sh,jdbc,ngql,psql,fql,tsql,txt,aisql" ).getValue private var fileTypeArr: Array[String] = null @@ -60,4 +69,18 @@ object LinkisStorageConf { fileTypeArr } + val LINKIS_RESULT_ENABLE_NULL = CommonVars("linkis.resultset.enable.null.replace", false).getValue + + val LINKIS_RESULT_COLUMN_SIZE = + CommonVars("linkis.resultset.column.size.max", Int.MaxValue).getValue + + val LINKIS_RESULT_COL_LENGTH = + CommonVars("linkis.resultset.col.length.max", Int.MaxValue).getValue + + val LINKIS__READ_RESULT_ROW_MAX_LEN_STR = + CommonVars("linkis.resultset.read.row.max.str", "20m").getValue + + val LINKIS_READ_ROW_BYTE_MAX_LEN = + ByteTimeUtils.byteStringAsBytes(LINKIS__READ_RESULT_ROW_MAX_LEN_STR) + } diff --git 
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala index c8ae290f2cc..93610a7db21 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala @@ -36,4 +36,13 @@ object CSVFsWriter { outputStream: OutputStream ): CSVFsWriter = new StorageCSVWriter(charset, separator, quoteRetouchEnable, outputStream) + def getCSVFSWriter( + charset: String, + separator: String, + quoteRetouchEnable: Boolean, + outputStream: OutputStream, + keepNewline: Boolean + ): CSVFsWriter = + new StorageCSVWriter(charset, separator, quoteRetouchEnable, outputStream, keepNewline) + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala index f9b811b6a69..95d98669b8d 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala @@ -35,7 +35,22 @@ class StorageCSVWriter( ) extends CSVFsWriter with Logging { + var keepNewline: Boolean = false; + + def this( + charset: String, + separator: String, + quoteRetouchEnable: Boolean, + outputStream: OutputStream, + keepNewline: Boolean + ) { + this(charset, separator, quoteRetouchEnable, outputStream) + this.keepNewline = keepNewline + } + private val delimiter = separator match { + // Compatible with possible missing escape characters + case "t" => '\t' case separ if StringUtils.isNotEmpty(separ) => separ case _ => '\t' } @@ -50,14 +65,23 @@ class StorageCSVWriter( private def compact(row: Array[String]): String = { val quotationMarks: String = "\"" + val 
dealNewlineSymbolMarks: String = "\n" + def decorateValue(v: String): String = { if (StringUtils.isBlank(v)) v else { + var res = v if (quoteRetouchEnable) { - s"$quotationMarks${v.replaceAll(quotationMarks, "")}$quotationMarks" - } else v + res = s"$quotationMarks${v.replaceAll(quotationMarks, "")}$quotationMarks" + } + if (!this.keepNewline) { + res = res.replaceAll(dealNewlineSymbolMarks, " ") + } + logger.debug("decorateValue with input:" + v + " output:" + res) + res } } + if (logger.isDebugEnabled()) { logger.debug("delimiter:" + delimiter.toString) } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala index 55c82abd38b..036cd8bfa26 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala @@ -18,16 +18,16 @@ package org.apache.linkis.storage.domain import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.conf.LinkisStorageConf import java.math.{BigDecimal => JavaBigDecimal} import java.sql.{Date, Timestamp} object DataType extends Logging { - val NULL_VALUE = "NULL" val LOWCASE_NULL_VALUE = "null" - // TODO Change to fine-grained regular expressions(改为精细化正则表达式) - val DECIMAL_REGEX = "^decimal\\(\\d*\\,\\d*\\)".r.unanchored + + val DECIMAL_REGEX = "^decimal\\(\\s*\\d*\\s*,\\s*\\d*\\s*\\)".r.unanchored val SHORT_REGEX = "^short.*".r.unanchored val INT_REGEX = "^int.*".r.unanchored @@ -70,39 +70,53 @@ object DataType extends Logging { case _ => StringType } - def toValue(dataType: DataType, value: String): Any = Utils.tryCatch(dataType match { - case NullType => null - case StringType | CharType | VarcharType | StructType | ListType | ArrayType | MapType => - value - case BooleanType => if (isNumberNull(value)) null else value.toBoolean - 
case ShortIntType => if (isNumberNull(value)) null else value.toShort - case IntType => if (isNumberNull(value)) null else value.toInt - case LongType | BigIntType => if (isNumberNull(value)) null else value.toLong - case FloatType => if (isNumberNull(value)) null else value.toFloat - case DoubleType => if (isNumberNull(value)) null else value.toDouble - case DecimalType => if (isNumberNull(value)) null else new JavaBigDecimal(value) - case DateType => if (isNumberNull(value)) null else Date.valueOf(value) - case TimestampType => - if (isNumberNull(value)) null else Timestamp.valueOf(value).toString.stripSuffix(".0") - case BinaryType => if (isNull(value)) null else value.getBytes() - case _ => value - }) { t => - logger.debug(s"Failed to $value switch to dataType:", t) - value + def toValue(dataType: DataType, value: String): Any = { + var newValue: String = value + if (isLinkisNull(value)) { + if (!LinkisStorageConf.LINKIS_RESULT_ENABLE_NULL) { + return null + } else { + newValue = Dolphin.NULL + } + } + Utils.tryCatch(dataType match { + case NullType => null + case StringType | CharType | VarcharType | StructType | ListType | ArrayType | MapType => + newValue + case BooleanType => if (isNumberNull(newValue)) null else newValue.toBoolean + case ShortIntType => if (isNumberNull(newValue)) null else newValue.toShort + case IntType => if (isNumberNull(newValue)) null else newValue.toInt + case LongType | BigIntType => if (isNumberNull(newValue)) null else newValue.toLong + case FloatType => if (isNumberNull(newValue)) null else newValue.toFloat + case DoubleType => if (isNumberNull(newValue)) null else newValue.toDouble + case DecimalType => if (isNumberNull(newValue)) null else new JavaBigDecimal(newValue) + case DateType => if (isNumberNull(newValue)) null else Date.valueOf(newValue) + case TimestampType => + if (isNumberNull(newValue)) null else Timestamp.valueOf(newValue).toString.stripSuffix(".0") + case BinaryType => if (isNull(newValue)) null else 
newValue.getBytes() + case _ => newValue + }) { t => + logger.debug(s"Failed to $newValue switch to dataType:", t) + newValue + } + } + + def isLinkisNull(value: String): Boolean = { + if (value == null || value == Dolphin.LINKIS_NULL) true else false } def isNull(value: String): Boolean = - if (value == null || value == NULL_VALUE || value.trim == "") true else false + if (value == null || value == Dolphin.NULL || value.trim == "") true else false def isNumberNull(value: String): Boolean = - if (null == value || NULL_VALUE.equalsIgnoreCase(value) || value.trim == "") { + if (null == value || Dolphin.NULL.equalsIgnoreCase(value) || value.trim == "") { true } else { false } def valueToString(value: Any): String = { - if (null == value) return LOWCASE_NULL_VALUE + if (null == value) return null value match { case javaDecimal: JavaBigDecimal => javaDecimal.toPlainString @@ -144,5 +158,7 @@ case class Column(columnName: String, dataType: DataType, comment: String) { Array[Any](columnName, dataType, comment) } - override def toString: String = s"columnName:$columnName,dataType:$dataType,comment:$comment" + override def toString: String = + s"columnName:$columnName,dataType:$dataType,comment:$comment" + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala index 378c2c2ecb7..667f1a635e7 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala @@ -19,7 +19,11 @@ package org.apache.linkis.storage.domain import org.apache.linkis.common.utils.Logging import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.FAILED_TO_READ_INTEGER -import org.apache.linkis.storage.exception.StorageWarnException +import org.apache.linkis.storage.exception.{ + StorageErrorCode, + 
StorageErrorException, + StorageWarnException +} import org.apache.linkis.storage.utils.{StorageConfiguration, StorageUtils} import java.io.{InputStream, IOException} @@ -39,7 +43,10 @@ object Dolphin extends Logging { val COL_SPLIT_LEN = COL_SPLIT_BYTES.length val NULL = "NULL" - val NULL_BYTES = "NULL".getBytes("utf-8") + val NULL_BYTES = NULL.getBytes("utf-8") + + val LINKIS_NULL = "LINKIS_NULL" + val LINKIS_NULL_BYTES = LINKIS_NULL.getBytes("utf-8") val INT_LEN = 10 @@ -56,8 +63,27 @@ object Dolphin extends Logging { * @param len * @return */ - def getString(bytes: Array[Byte], start: Int, len: Int): String = - new String(bytes, start, len, Dolphin.CHAR_SET) + def getString(bytes: Array[Byte], start: Int, len: Int): String = { + try { + new String(bytes, start, len, Dolphin.CHAR_SET) + } catch { + case e: OutOfMemoryError => + logger.error("bytes to String oom {} Byte", bytes.length) + throw new StorageErrorException( + StorageErrorCode.FS_OOM.getCode, + StorageErrorCode.FS_OOM.getMessage, + e + ) + } + } + + def toStringValue(value: String): String = { + if (LINKIS_NULL.equals(value)) { + NULL + } else { + value + } + } /** * Read an integer value that converts the array to a byte of length 10 bytes diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/MethodEntity.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/MethodEntity.scala index ec9cf4f15fa..6da18d43fc6 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/MethodEntity.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/MethodEntity.scala @@ -19,9 +19,7 @@ package org.apache.linkis.storage.domain import java.lang.reflect.Type -import com.google.gson.GsonBuilder -import org.json4s.DefaultFormats -import org.json4s.jackson.Serialization.write +import com.google.gson.{GsonBuilder, ToNumberPolicy} /** * @param id @@ -58,23 +56,24 @@ case class MethodEntity( 
object MethodEntitySerializer { - implicit val formats = DefaultFormats - import org.json4s.jackson.JsonMethods._ - val gson = new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").create + val gson = new GsonBuilder() + .setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ") + .setObjectToNumberStrategy(ToNumberPolicy.LAZILY_PARSED_NUMBER) + .create /** * Serialized to code as a MethodEntity object 序列化为code为MethodEntity对象 * @param code * @return */ - def deserializer(code: String): MethodEntity = parse(code).extract[MethodEntity] + def deserializer(code: String): MethodEntity = gson.fromJson(code, classOf[MethodEntity]) /** * Serialize MethodEntity to code 序列化MethodEntity为code * @param methodEntity * @return */ - def serializer(methodEntity: MethodEntity): String = write(methodEntity) + def serializer(methodEntity: MethodEntity): String = gson.toJson(methodEntity) /** * Serialize a java object as a string 序列化java对象为字符串 diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala index 8d0f0bdde72..9ea83130df2 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala @@ -62,7 +62,7 @@ class StorageExcelWriter( val headerFont = workBook.createFont headerFont.setBold(true) headerFont.setFontHeightInPoints(14.toShort) - headerFont.setColor(IndexedColors.RED.getIndex) + headerFont.setColor(IndexedColors.BLACK.getIndex) val headerCellStyle = workBook.createCellStyle headerCellStyle.setFont(headerFont) headerCellStyle diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala index 
38973ae1ab4..d4836731dbd 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala @@ -108,7 +108,7 @@ class DefaultResultSetFactory extends ResultSetFactory with Logging { if (StringUtils.isEmpty(resultSetType)) { throw new StorageWarnException( THE_FILE_IS_EMPTY.getErrorCode, - s"The file (${fsPath.getPath}) is empty(文件(${fsPath.getPath}) 为空)" + MessageFormat.format(THE_FILE_IS_EMPTY.getErrorDesc, fsPath.getPath) ) } Utils.tryQuietly(inputStream.close()) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala index 663e379b5b6..e61cf36b3dd 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala @@ -19,6 +19,7 @@ package org.apache.linkis.storage.resultset import org.apache.linkis.common.io.{FsPath, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetReader} +import org.apache.linkis.common.utils.Logging import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED import org.apache.linkis.storage.exception.StorageErrorException @@ -26,7 +27,7 @@ import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord, Ta import java.io.InputStream -object ResultSetReader { +object ResultSetReader extends Logging { def getResultSetReader[K <: MetaData, V <: Record]( resultSet: ResultSet[K, V], @@ -83,6 +84,7 @@ object ResultSetReader { ) } val fs = FSFactory.getFs(resPath) + logger.info("Try to init Fs with path:" + resPath.getPath) 
fs.init(null) ResultSetReader.getResultSetReader(resultSet.asInstanceOf[TableResultSet], fs.read(resPath)) } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala index fc303fbb5c2..7b3aca62d96 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala @@ -34,6 +34,7 @@ abstract class StorageResultSet[K <: MetaData, V <: Record] extends ResultSet[K, } else { parentDir.toPath + "/" + fileName + Dolphin.DOLPHIN_FILE_SUFFIX } + logger.info(s"Get result set path:${path}") new FsPath(path) } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala index 3f49faf3edd..24dd0fdf8a8 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala @@ -20,11 +20,20 @@ package org.apache.linkis.storage.resultset import org.apache.linkis.common.io.{Fs, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetReader} import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.conf.LinkisStorageConf import org.apache.linkis.storage.domain.Dolphin -import org.apache.linkis.storage.exception.StorageWarnException +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary +import org.apache.linkis.storage.exception.{ + ColLengthExceedException, + StorageErrorCode, + StorageErrorException, + StorageWarnException +} +import 
org.apache.linkis.storage.resultset.table.TableMetaData import org.apache.linkis.storage.utils.StorageUtils import java.io.{ByteArrayInputStream, InputStream, IOException} +import java.text.MessageFormat import scala.collection.mutable.ArrayBuffer @@ -37,14 +46,10 @@ class StorageResultSetReader[K <: MetaData, V <: Record]( private val deserializer = resultSet.createResultSetDeserializer private var metaData: K = _ private var row: Record = _ - private var colCount = 0 private var rowCount = 0 private var fs: Fs = _ - private val READ_CACHE = 1024 - private val bytes = new Array[Byte](READ_CACHE) - def this(resultSet: ResultSet[K, V], value: String) = { this(resultSet, new ByteArrayInputStream(value.getBytes(Dolphin.CHAR_SET))) } @@ -74,24 +79,38 @@ class StorageResultSetReader[K <: MetaData, V <: Record]( case t: Throwable => throw t } - val rowBuffer = ArrayBuffer[Byte]() - var len = 0 - - // Read the entire line, except for the data of the line length(读取整行,除了行长的数据) - while (rowLen > 0 && len >= 0) { - if (rowLen > READ_CACHE) { - len = StorageUtils.readBytes(inputStream, bytes, READ_CACHE) - } else { - len = StorageUtils.readBytes(inputStream, bytes, rowLen) - } + if (rowLen > LinkisStorageConf.LINKIS_READ_ROW_BYTE_MAX_LEN) { + throw new ColLengthExceedException( + LinkisStorageErrorCodeSummary.RESULT_ROW_LENGTH.getErrorCode, + MessageFormat.format( + LinkisStorageErrorCodeSummary.RESULT_ROW_LENGTH.getErrorDesc, + rowLen.asInstanceOf[Object], + LinkisStorageConf.LINKIS_READ_ROW_BYTE_MAX_LEN.asInstanceOf[Object] + ) + ) + } - if (len > 0) { - rowLen -= len - rowBuffer ++= bytes.slice(0, len) - } + var bytes: Array[Byte] = null + try { + bytes = new Array[Byte](rowLen) + } catch { + case e: OutOfMemoryError => + logger.error("Result set read oom, read size {} Byte", rowLen) + throw new StorageErrorException( + StorageErrorCode.FS_OOM.getCode, + StorageErrorCode.FS_OOM.getMessage, + e + ) + } + val len = StorageUtils.readBytes(inputStream, bytes, rowLen) + if (len 
!= rowLen) { + throw new StorageErrorException( + StorageErrorCode.INCONSISTENT_DATA.getCode, + String.format(StorageErrorCode.INCONSISTENT_DATA.getMessage, len.toString, rowLen.toString) + ) } rowCount = rowCount + 1 - rowBuffer.toArray + bytes } @scala.throws[IOException] @@ -136,6 +155,7 @@ class StorageResultSetReader[K <: MetaData, V <: Record]( @scala.throws[IOException] override def hasNext: Boolean = { if (metaData == null) getMetaData + val line = readLine() if (line == null) return false row = deserializer.createRecord(line) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala index 9c7947272c0..caed8c0ea08 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala @@ -23,6 +23,8 @@ import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.conf.LinkisStorageConf import org.apache.linkis.storage.domain.Dolphin +import org.apache.linkis.storage.fs.FileSystem +import org.apache.linkis.storage.fs.impl.HDFSFileSystem import org.apache.linkis.storage.utils.{FileSystemUtils, StorageUtils} import org.apache.commons.io.IOUtils @@ -86,10 +88,17 @@ class StorageResultSetWriter[K <: MetaData, V <: Record]( WRITER_LOCK_CREATE.synchronized { if (!fileCreated) { if (storePath != null && outputStream == null) { + logger.info(s"Try to create a new file:${storePath}, with proxy user:${proxyUser}") fs = FSFactory.getFsByProxyUser(storePath, proxyUser) fs.init(null) FileSystemUtils.createNewFile(storePath, proxyUser, true) + outputStream = fs.write(storePath, true) + fs match { + case fileSystem: FileSystem => + fileSystem.setPermission(storePath, 
"rwx------") + case _ => + } logger.info(s"Succeed to create a new file:$storePath") fileCreated = true } @@ -187,11 +196,11 @@ class StorageResultSetWriter[K <: MetaData, V <: Record]( } } Utils.tryFinally(if (outputStream != null) flush()) { - closeFs if (outputStream != null) { IOUtils.closeQuietly(outputStream) outputStream = null } + closeFs } } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala index d8e05609642..64d2b18b9cf 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala @@ -27,10 +27,4 @@ class TableRecord(val row: Array[Any]) extends ResultRecord { new TableRecord(row) } - def tableRecordToString(nullValue: String = "NULL"): Array[String] = { - row.map { col => - StorageUtils.colToString(col, nullValue) - } - } - } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala index 40c4e031f7a..86d09e9532d 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala @@ -18,16 +18,29 @@ package org.apache.linkis.storage.resultset.table import org.apache.linkis.common.io.resultset.ResultDeserializer +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.storage.conf.LinkisStorageConf import org.apache.linkis.storage.domain.{Column, DataType, Dolphin} +import 
org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.PARSING_METADATA_FAILED -import org.apache.linkis.storage.exception.StorageErrorException +import org.apache.linkis.storage.exception.{ + ColLengthExceedException, + ColumnIndexExceedException, + StorageErrorException +} + +import org.apache.commons.lang3.StringUtils + +import java.text.MessageFormat import scala.collection.mutable.ArrayBuffer -class TableResultDeserializer extends ResultDeserializer[TableMetaData, TableRecord] { +class TableResultDeserializer extends ResultDeserializer[TableMetaData, TableRecord] with Logging { var metaData: TableMetaData = _ + var columnSet: Set[Int] = null + import DataType._ override def createMetaData(bytes: Array[Byte]): TableMetaData = { @@ -47,13 +60,13 @@ class TableResultDeserializer extends ResultDeserializer[TableMetaData, TableRec val columns = new ArrayBuffer[Column]() for (i <- 0 until (colArray.length, 3)) { var len = colArray(i).toInt - val colName = Dolphin.getString(bytes, index, len) + val colName = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)) index += len len = colArray(i + 1).toInt - val colType = Dolphin.getString(bytes, index, len) + val colType = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)) index += len len = colArray(i + 2).toInt - val colComment = Dolphin.getString(bytes, index, len) + val colComment = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)) index += len columns += Column(colName, colType, colComment) } @@ -75,16 +88,69 @@ class TableResultDeserializer extends ResultDeserializer[TableMetaData, TableRec colString.substring(0, colString.length - 1).split(Dolphin.COL_SPLIT) } else colString.split(Dolphin.COL_SPLIT) var index = Dolphin.INT_LEN + colByteLen - val data = colArray.indices.map { i => + var enableLimit: Boolean = false + if (StringUtils.isNotBlank(LinkisStorageConf.enableLimitThreadLocal.get())) { + enableLimit 
= true + } + val columnIndices: Array[Int] = LinkisStorageConf.columnIndicesThreadLocal.get() + if (columnSet == null && columnIndices != null) { + columnSet = columnIndices.toSet + } + + val lastIndex = + if (columnIndices != null && columnIndices.length > 0) columnIndices(columnIndices.length - 1) + else 0 + var realValueSize = colArray.size + + if (enableLimit && metaData.columns.size <= columnIndices(0)) { + throw new ColumnIndexExceedException( + LinkisStorageErrorCodeSummary.RESULT_COLUMN_INDEX_OUT_OF_BOUNDS.getErrorCode, + MessageFormat.format( + LinkisStorageErrorCodeSummary.RESULT_COLUMN_INDEX_OUT_OF_BOUNDS.getErrorDesc, + columnIndices(0).asInstanceOf[Object], + metaData.columns.size.asInstanceOf[Object] + ) + ) + } + + if (enableLimit && metaData.columns.size > lastIndex) { + realValueSize = columnIndices.length + } else if (enableLimit && metaData.columns.size <= lastIndex) { + realValueSize = metaData.columns.size % columnIndices.length + } + + val columnSize = colArray.size + val rowArray = new Array[Any](realValueSize) + + var colIdx = 0 + for (i <- 0 until columnSize) { val len = colArray(i).toInt val res = Dolphin.getString(bytes, index, len) + if (res.length > LinkisStorageConf.LINKIS_RESULT_COL_LENGTH && enableLimit) { + throw new ColLengthExceedException( + LinkisStorageErrorCodeSummary.RESULT_COL_LENGTH.getErrorCode, + MessageFormat.format( + LinkisStorageErrorCodeSummary.RESULT_COL_LENGTH.getErrorDesc, + res.length.asInstanceOf[Object], + LinkisStorageConf.LINKIS_RESULT_COL_LENGTH.asInstanceOf[Object] + ) + ) + } index += len - if (i >= metaData.columns.length) res - else { - toValue(metaData.columns(i).dataType, res) + // 如果enableLimit为true,则采取的是列分页 + if (enableLimit) { + if (columnSet.contains(i)) { + rowArray(colIdx) = toValue(metaData.columns(i).dataType, res) + colIdx += 1 + } + } else { + if (i >= metaData.columns.length) rowArray(i) = res + else { + rowArray(i) = toValue(metaData.columns(i).dataType, res) + } } - }.toArray - new 
TableRecord(data) + } + new TableRecord(rowArray) } } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala index 94b6cb4c039..5d1738a3462 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala @@ -19,6 +19,7 @@ package org.apache.linkis.storage.resultset.table import org.apache.linkis.common.io.{MetaData, Record} import org.apache.linkis.common.io.resultset.ResultSerializer +import org.apache.linkis.storage.conf.LinkisStorageConf import org.apache.linkis.storage.domain.Dolphin import scala.collection.mutable.ArrayBuffer @@ -45,14 +46,20 @@ class TableResultSerializer extends ResultSerializer { * @param line */ def lineToBytes(line: Array[Any]): Array[Byte] = { - // Data cache(数据缓存) val dataBytes = ArrayBuffer[Array[Byte]]() - // Column cache(列缓存) val colIndex = ArrayBuffer[Array[Byte]]() var colByteLen = 0 var length = 0 line.foreach { data => - val bytes = if (data == null) Dolphin.NULL_BYTES else Dolphin.getBytes(data) + val bytes = if (data == null) { + if (!LinkisStorageConf.LINKIS_RESULT_ENABLE_NULL) { + Dolphin.LINKIS_NULL_BYTES + } else { + Dolphin.NULL_BYTES + } + } else { + Dolphin.getBytes(data) + } dataBytes += bytes val colBytes = Dolphin.getBytes(bytes.length) colIndex += colBytes += Dolphin.COL_SPLIT_BYTES diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala index 1a903440688..e0fa5b7f406 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala +++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala @@ -23,6 +23,7 @@ import org.apache.commons.io.IOUtils import org.apache.commons.math3.util.Pair import java.util +import java.util.Arrays import scala.collection.JavaConverters._ @@ -68,4 +69,14 @@ abstract class AbstractFileSource(var fileSplits: Array[FileSplit]) extends File override def getFileInfo(needToCountRowNumber: Int = 5000): Array[Pair[Int, Int]] = fileSplits.map(_.getFileInfo(needToCountRowNumber)) + override def limitBytes(limitBytes: Long): FileSource = { + fileSplits.foreach((fileSplit: FileSplit) => fileSplit.setLimitBytes(limitBytes)) + this + } + + override def limitColumnLength(limitColumnLength: Int): FileSource = { + fileSplits.foreach((fileSplit: FileSplit) => fileSplit.setLimitColumnLength(limitColumnLength)) + this + } + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala index 7a9fa4f04c7..4b0b593db6f 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala @@ -18,6 +18,7 @@ package org.apache.linkis.storage.source import org.apache.linkis.common.io._ +import org.apache.linkis.common.utils.Logging import org.apache.linkis.storage.conf.LinkisStorageConf import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE import org.apache.linkis.storage.exception.StorageErrorException @@ -50,12 +51,16 @@ trait FileSource extends Closeable { def getTotalLine: Int + def limitBytes(limitBytes: Long): FileSource + + def limitColumnLength(limitColumnLength: Int): FileSource + def getTypes: Array[String] def getFileSplits: Array[FileSplit] } -object FileSource { +object FileSource extends Logging { private val 
fileType = LinkisStorageConf.getFileTypeArr private val suffixPredicate = (path: String, suffix: String) => path.endsWith(s".$suffix") @@ -124,6 +129,7 @@ object FileSource { } private def createResultSetFileSplit(fsPath: FsPath, fs: Fs): FileSplit = { + logger.info(s"try create result set file split with path:${fsPath.getPath}") val resultset = ResultSetFactory.getInstance.getResultSetByPath(fsPath, fs) val resultsetReader = ResultSetReader.getResultSetReader(resultset, fs.read(fsPath)) new FileSplit(resultsetReader, resultset.resultSetType()) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala index 467fbca3355..f4163e82631 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala @@ -51,6 +51,9 @@ class FileSplit( var params: util.Map[String, String] = new util.HashMap[String, String] + private var limitBytes = 0L + private var limitColumnLength = 0 + def page(page: Int, pageSize: Int): Unit = { if (!pageTrigger) { start = (page - 1) * pageSize @@ -124,13 +127,42 @@ class FileSplit( def collect(): Pair[Object, util.ArrayList[Array[String]]] = { val record = new util.ArrayList[Array[String]] - val metaData = `while`(collectMetaData, r => record.add(collectRecord(r))) + var overFlag = false + var tmpBytes = 0L + + val metaData = `while`( + collectMetaData, + r => { + if (limitBytes > 0 && !overFlag) { + val resArr = collectRecord(r) + resArr.foreach(res => tmpBytes = tmpBytes + res.getBytes.length) + if (tmpBytes > limitBytes) { + overFlag = true + } + record.add(resArr) + } else { + record.add(collectRecord(r)) + } + } + ) new Pair(metaData, record) } def collectRecord(record: Record): Array[String] = { record match { - case t: TableRecord => 
t.row.map(DataType.valueToString) + case t: TableRecord => + if (limitColumnLength > 0) { + t.row.map { col => + val str = DataType.valueToString(col) + if (str.length > limitColumnLength) { + str.substring(0, limitColumnLength) + } else { + str + } + } + } else { + t.row.map(DataType.valueToString) + } case l: LineRecord => Array(l.getLine) } } @@ -157,6 +189,14 @@ class FileSplit( def ifStartRead: Boolean = !pageTrigger || count >= start + def setLimitBytes(limitBytes: Long): Unit = { + this.limitBytes = limitBytes + } + + def setLimitColumnLength(limitColumnLength: Int): Unit = { + this.limitColumnLength = limitColumnLength + } + override def close(): Unit = IOUtils.closeQuietly(fsReader) } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala index 7b387e9f19d..adbb596aa21 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala @@ -17,6 +17,7 @@ package org.apache.linkis.storage.source +import org.apache.linkis.storage.domain.Dolphin import org.apache.linkis.storage.resultset.table.TableRecord import org.apache.linkis.storage.utils.StorageUtils @@ -24,11 +25,27 @@ class ResultsetFileSource(fileSplits: Array[FileSplit]) extends AbstractFileSour shuffle({ case t: TableRecord => - new TableRecord(t.row.map { - case null | "NULL" => getParams.getOrDefault("nullValue", "NULL") - case "" => getParams.getOrDefault("nullValue", "") - case value: Double => StorageUtils.doubleToString(value) - case r => r + new TableRecord(t.row.map { rvalue => + { + rvalue match { + case null | "NULL" => + val nullValue = getParams.getOrDefault("nullValue", "NULL") + if (nullValue.equals(Dolphin.LINKIS_NULL)) { + rvalue + } else { + nullValue + } + case 
"" => + val nullValue = getParams.getOrDefault("nullValue", "") + if (nullValue.equals(Dolphin.LINKIS_NULL)) { + "" + } else { + nullValue + } + case value: Double => StorageUtils.doubleToString(value) + case _ => rvalue + } + } }) case record => record }) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/FileSystemUtils.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/FileSystemUtils.scala index 5252c12e037..9c344fa8028 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/FileSystemUtils.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/FileSystemUtils.scala @@ -61,6 +61,7 @@ object FileSystemUtils extends Logging { }(Utils.tryQuietly(fileSystem.close())) } + @deprecated("please use createNewFileAndSetOwnerWithFileSystem") def createNewFileWithFileSystem( fileSystem: FileSystem, filePath: FsPath, @@ -82,6 +83,31 @@ object FileSystemUtils extends Logging { } } + /** + * create new file and set file owner by FileSystem + * @param fileSystem + * @param filePath + * @param user + * @param createParentWhenNotExists + */ + def createNewFileAndSetOwnerWithFileSystem( + fileSystem: FileSystem, + filePath: FsPath, + user: String, + createParentWhenNotExists: Boolean + ): Unit = { + if (!fileSystem.exists(filePath)) { + if (!fileSystem.exists(filePath.getParent)) { + if (!createParentWhenNotExists) { + throw new IOException("parent dir " + filePath.getParent.getPath + " dose not exists.") + } + mkdirsAndSetOwner(fileSystem, filePath.getParent, user) + } + fileSystem.createNewFile(filePath) + fileSystem.setOwner(filePath, user) + } + } + /** * Recursively create a directory(递归创建目录) * @param fileSystem @@ -91,6 +117,7 @@ object FileSystemUtils extends Logging { * @return */ @throws[IOException] + @deprecated("please use mkdirsAndSetOwner") def mkdirs(fileSystem: FileSystem, dest: FsPath, user: String): Boolean = { var 
parentPath = dest.getParent val dirsToMake = new util.Stack[FsPath]() @@ -113,4 +140,32 @@ object FileSystemUtils extends Logging { true } + /** + * Recursively create a directory(递归创建目录) 默认添加 Owner 信息 + * @param fileSystem + * @param dest + * @param user + * @throws + * @return + */ + @throws[IOException] + def mkdirsAndSetOwner(fileSystem: FileSystem, dest: FsPath, user: String): Boolean = { + var parentPath = dest.getParent + val dirsToMake = new util.Stack[FsPath]() + dirsToMake.push(dest) + while (!fileSystem.exists(parentPath)) { + dirsToMake.push(parentPath) + parentPath = parentPath.getParent + } + if (!fileSystem.canExecute(parentPath)) { + throw new IOException("You have not permission to access path " + dest.getPath) + } + while (!dirsToMake.empty()) { + val path = dirsToMake.pop() + fileSystem.mkdir(path) + fileSystem.setOwner(path, user) + } + true + } + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala index bb6f4463e92..e73991db151 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala @@ -35,6 +35,8 @@ object StorageConfiguration { val STORAGE_RS_FILE_SUFFIX = CommonVars("wds.linkis.storage.rs.file.suffix", ".dolphin") + val LINKIS_STORAGE_FS_LABEL = CommonVars("linkis.storage.default.fs.label", "linkis-storage") + val ResultTypes = List("%TEXT", "%TABLE", "%HTML", "%IMG", "%ANGULAR", "%SVG") val STORAGE_RESULT_SET_PACKAGE = @@ -77,9 +79,6 @@ object StorageConfiguration { val HDFS_PATH_PREFIX_REMOVE = CommonVars[Boolean]("wds.linkis.storage.hdfs.prefxi.remove", true) - val FS_CACHE_DISABLE = - CommonVars[java.lang.Boolean]("wds.linkis.fs.hdfs.impl.disable.cache", false) - val FS_CHECKSUM_DISBALE = 
CommonVars[java.lang.Boolean]("linkis.fs.hdfs.impl.disable.checksum", false) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala index 90eb319fa01..4b9368c0493 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala @@ -46,7 +46,11 @@ object StorageUtils extends Logging { nf.setMaximumFractionDigits(StorageConfiguration.DOUBLE_FRACTION_LEN.getValue) def doubleToString(value: Double): String = { - nf.format(value) + if (value.isNaN) { + "NaN" + } else { + nf.format(value) + } } def loadClass[T](classStr: String, op: T => String): Map[String, T] = { @@ -203,7 +207,12 @@ object StorageUtils extends Logging { def readBytes(inputStream: InputStream, bytes: Array[Byte], len: Int): Int = { var count = 0 var readLen = 0 - while (readLen < len) { + // 当使用s3存储结果文件时时,com.amazonaws.services.s3.model.S3InputStream无法正确读取.dolphin文件。需要在循环条件添加: + // readLen >= 0 + // To resolve the issue when using S3 to store result files and + // com.amazonaws.services.s3.model.S3InputStream to read .dolphin files, you need to add the + // condition readLen >= 0 in the loop. 
+ while (readLen < len && readLen >= 0) { count = inputStream.read(bytes, readLen, len - readLen) if (count == -1 && inputStream.available() < 1) return readLen readLen += count @@ -211,19 +220,12 @@ object StorageUtils extends Logging { readLen } - def colToString(col: Any, nullValue: String = "NULL"): String = { - if (null == col) nullValue - else { - col match { - case value: Double => doubleToString(value) - case "NULL" | "" => nullValue - case _ => col.toString - } - } - } - def isIOProxy(): Boolean = { StorageConfiguration.ENABLE_IO_PROXY.getValue } + def isHDFSPath(fsPath: FsPath): Boolean = { + HDFS.equals(fsPath.getFsType) + } + } diff --git a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/excel/ExcelStorageReaderTest.java b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/excel/ExcelStorageReaderTest.java new file mode 100644 index 00000000000..8a7d3da0b07 --- /dev/null +++ b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/excel/ExcelStorageReaderTest.java @@ -0,0 +1,206 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.excel; + +import org.apache.commons.io.input.BOMInputStream; +import org.apache.poi.hssf.usermodel.HSSFWorkbook; +import org.apache.poi.ss.usermodel.Cell; +import org.apache.poi.ss.usermodel.Row; +import org.apache.poi.ss.usermodel.Sheet; +import org.apache.poi.ss.usermodel.Workbook; +import org.apache.poi.xssf.usermodel.XSSFWorkbook; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.StringUtils; + +import java.io.*; +import java.nio.charset.StandardCharsets; +import java.util.*; + +import org.junit.jupiter.api.*; + +/** ExcelStorageReader Tester */ +public class ExcelStorageReaderTest { + + @Autowired private ExcelStorageReader excelStorageReader; + + @BeforeEach + @DisplayName("Each unit test method is executed once before execution") + public void before() throws Exception {} + + @AfterEach + @DisplayName("Each unit test method is executed once before execution") + public void after() throws Exception {} + + private static InputStream createExcelAndGetInputStream(int type) throws IOException { + Workbook workbook = new HSSFWorkbook(); + if (type == 1) { + workbook = new XSSFWorkbook(); + } + // 创建一个新的工作簿 + try { // HSSFWorkbook 用于处理 .xls 格式 + + // 创建一个工作表 + Sheet sheet = workbook.createSheet("Sheet1"); + + // 创建一行并在第一行写入一些数据(示例) + Row row = sheet.createRow(0); + Cell cell = row.createCell(0); + Cell cell2 = row.createCell(1); + cell.setCellValue("Hello"); + cell2.setCellValue("Hello2"); + + // 创建一个工作表 + Sheet sheet1 = workbook.createSheet("Sheet2"); + + // 创建一行并在第一行写入一些数据(示例) + Row row1 = sheet1.createRow(0); + Cell cell1 = row1.createCell(0); + Cell cell22 = row1.createCell(1); + cell1.setCellValue("Work"); + cell22.setCellValue("Work1"); + + // 将工作簿写入 ByteArrayOutputStream + try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { + workbook.write(outputStream); + + // 从 ByteArrayOutputStream 中获取 InputStream + return new 
ByteArrayInputStream(outputStream.toByteArray()); + } + } catch (Exception e) { + + } + return null; + } + + public InputStream createCSVInputStream(List> data) { + String csvData = convertToCSV(data); + return new ByteArrayInputStream(csvData.getBytes(StandardCharsets.UTF_8)); + } + + private String convertToCSV(List> data) { + StringBuilder csvData = new StringBuilder(); + + for (List row : data) { + for (String column : row) { + csvData.append(column).append(","); + } + csvData.deleteCharAt(csvData.length() - 1).append("\n"); + } + + return csvData.toString(); + } + + private Map getCsvInfo(InputStream in, boolean escapeQuotes, boolean hasHeader) + throws Exception { + HashMap csvMap = new LinkedHashMap<>(); + String[][] column = null; + // fix csv file with utf-8 with bom chart[] + BOMInputStream bomIn = new BOMInputStream(in, false); // don't include the BOM + BufferedReader reader = new BufferedReader(new InputStreamReader(bomIn, "utf-8")); // NOSONAR + + String header = reader.readLine(); + if (StringUtils.isEmpty(header)) { + throw new RuntimeException("内容为空"); + } + String[] line = header.split(",", -1); + int colNum = line.length; + column = new String[2][colNum]; + if (hasHeader) { + for (int i = 0; i < colNum; i++) { + column[0][i] = line[i]; + if (escapeQuotes) { + try { + csvMap.put(column[0][i].substring(1, column[0][i].length() - 1), "string"); + } catch (StringIndexOutOfBoundsException e) { + throw new RuntimeException("处理标题引号异常"); + } + } else { + csvMap.put(column[0][i], "string"); + } + } + } else { + for (int i = 0; i < colNum; i++) { + csvMap.put("col_" + (i + 1), "string"); + } + } + csvMap.forEach((key, value) -> System.out.println(key + ": " + value)); + return csvMap; + } + + @Test + public void getXlsSheetInfo() throws Exception { + Map>> sheetsInfo = + XlsUtils.getSheetsInfo(createExcelAndGetInputStream(0), true); + Assertions.assertTrue(sheetsInfo.containsKey("Sheet2")); + List> sheet2 = sheetsInfo.get("Sheet2"); + String work1 = ""; + 
for (Map sheetMap : sheet2) { + if (sheetMap.containsKey("Work1")) { + work1 = sheetMap.get("Work1"); + } + } + Assertions.assertEquals("string", work1); + } + + @Test + public void getXlsxSheetInfo() throws Exception { + Map>> sheetsInfo = + XlsxUtils.getAllSheetInfo(createExcelAndGetInputStream(1), null, true); + Assertions.assertTrue(sheetsInfo.containsKey("Sheet2")); + List> sheet2 = sheetsInfo.get("Sheet2"); + String work1 = ""; + for (Map sheetMap : sheet2) { + if (sheetMap.containsKey("Work1")) { + work1 = sheetMap.get("Work1"); + } + } + Assertions.assertEquals("string", work1); + } + + @Test + public void getCsvSheetInfo() throws Exception { + List> data = new ArrayList<>(); + data.add(Arrays.asList("Name", "Age", "City")); + data.add(Arrays.asList("John Doe", "30", "New York")); + data.add(Arrays.asList("Jane Smith", "25", "San Francisco")); + + // 有标题 + InputStream inputStream = createCSVInputStream(data); + Map csvMap = getCsvInfo(inputStream, false, true); + Assertions.assertEquals("string", csvMap.get("Name")); + + // 无标题 + InputStream inputStream1 = createCSVInputStream(data); + Map csvMap1 = getCsvInfo(inputStream1, false, false); + Assertions.assertEquals("string", csvMap1.get("col_1")); + + List> data1 = new ArrayList<>(); + data1.add(Arrays.asList("'Name'", "'Age'", "'City'")); + + // 有标题有引号 + InputStream inputStream2 = createCSVInputStream(data1); + Map csvMap2 = getCsvInfo(inputStream2, true, true); + Assertions.assertEquals("string", csvMap2.get("Name")); + + // 无标题 + InputStream inputStream3 = createCSVInputStream(data1); + Map csvMap3 = getCsvInfo(inputStream3, false, false); + Assertions.assertEquals("string", csvMap3.get("col_1")); + } +} diff --git a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala index 3bfc35b17ca..e0d3d7efb6a 100644 --- 
a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala +++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala @@ -25,7 +25,7 @@ class DataTypeTest { @DisplayName("constTest") def constTest(): Unit = { - val nullvalue = DataType.NULL_VALUE + val nullvalue = Dolphin.NULL val lowcasenullvalue = DataType.LOWCASE_NULL_VALUE Assertions.assertEquals("NULL", nullvalue) @@ -60,4 +60,19 @@ class DataTypeTest { } + @Test + @DisplayName("toValueTest") + def toValueTest(): Unit = { + val dateType = DataType.toDataType("double") + val str = DataType.toValue(dateType, "NaN") + Assertions.assertNotNull(str) + } + + @Test + @DisplayName("decimalTest") + def decimalTest(): Unit = { + val dateType = DataType.toDataType("decimal(10, 8)") + Assertions.assertTrue(dateType.typeName.equals("decimal")) + } + } diff --git a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala index 31d7e977aa3..ecd5c89cf96 100644 --- a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala +++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala @@ -46,7 +46,6 @@ class StorageConfigurationTest { val doublefractionlen = StorageConfiguration.DOUBLE_FRACTION_LEN.getValue val hdfspathprefixcheckon = StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON.getValue val hdfspathprefixremove = StorageConfiguration.HDFS_PATH_PREFIX_REMOVE.getValue - val fscachedisable = StorageConfiguration.FS_CACHE_DISABLE.getValue val fschecksumdisbale = StorageConfiguration.FS_CHECKSUM_DISBALE.getValue Assertions.assertEquals("hadoop", storagerootuser) @@ -76,7 +75,6 @@ class StorageConfigurationTest { Assertions.assertTrue(30 == doublefractionlen) 
Assertions.assertTrue(hdfspathprefixcheckon) Assertions.assertTrue(hdfspathprefixremove) - Assertions.assertFalse(fscachedisable) Assertions.assertFalse(fschecksumdisbale) } diff --git a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala new file mode 100644 index 00000000000..6ae15782eeb --- /dev/null +++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.utils + +import org.junit.jupiter.api.{Assertions, DisplayName, Test} + +class StorageUtilsTest { + + @Test + @DisplayName("doubleToStringTest") + def doubleToStringTest(): Unit = { + val str = StorageUtils.doubleToString(Double.NaN) + Assertions.assertEquals("NaN", str) + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/pom.xml deleted file mode 100644 index 980141f9a39..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/pom.xml +++ /dev/null @@ -1,87 +0,0 @@ - - - - 4.0.0 - - - org.apache.linkis - linkis-cli - ${revision} - - linkis-cli-application - jar - - - - org.apache.linkis - linkis-cli-common - ${project.version} - - - org.apache.linkis - linkis-cli-core - ${project.version} - - - org.apache.linkis - linkis-computation-client - ${project.version} - - - org.apache.linkis - linkis-gateway-httpclient-support - ${project.version} - - - - - - - - org.apache.maven.plugins - maven-assembly-plugin - false - - false - out - false - false - - /src/main/assembly/distribution.xml - - - - - make-assembly - - single - - package - - - /src/main/assembly/distribution.xml - - - - - - - - - - diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/assembly/distribution.xml deleted file mode 100644 index a1cf04696a8..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/assembly/distribution.xml +++ /dev/null @@ -1,123 +0,0 @@ - - - - - linkis-cli - - dir - - false - - - - lib - true - true - false - false - true - runtime - - org.apache.hadoop:*:jar - org.apache.spark:*:jar - org.apache.zookeeper:*:jar - org.apache.avro:*:jar - 
com.google.code.findbugs:annotations:jar - commons-beanutils:commons-beanutils:jar - commons-codec:commons-codec:jar - commons-collections:commons-collections:jar - commons-io:commons-io:jar - org.checkerframework:checker-qual:jar - commons-lang:commons-lang:jar - org.apache.commons:commons-lang3:jar - commons-net:commons-net:jar - com.google.errorprone:error_prone_annotations:jar - com.google.guava:failureaccess:jar - com.google.code.gson:gson:jar - com.google.guava:guava:jar - commons-httpclient:commons-httpclient:jar - commons-net:commons-net:jar - com.google.errorprone:error_prone_annotations:jar - org.apache.httpcomponents:httpcore:jar - com.google.j2objc:j2objc-annotations:jar - com.fasterxml.jackson.core:jackson-annotations:jar - com.fasterxml.jackson.core:jackson-core:jar - com.fasterxml.jackson.core:jackson-databind:jar - com.fasterxml.jackson.module:jackson-module-parameter-names:jar - com.fasterxml.jackson.module:jackson-module-scala_2.11:jar - jakarta.annotation:jakarta.annotation-api:jar - org.javassist:javassist:jar - org.json4s:json4s-ast_2.11:jar - org.json4s:json4s-core_2.11:jar - org.json4s:json4s-scalap_2.11:jar - org.json4s:json4s-jackson_2.11:jar - org.apache.logging.log4j:log4j-api:jar - org.apache.logging.log4j:log4j-core:jar - org.apache.logging.log4j:log4j-jul:jar - org.apache.logging.log4j:log4j-slf4j-impl:jar - com.thoughtworks.paranamer:paranamer:jar - org.reflections:reflections:jar - org.scala-lang:scala-compiler:jar - org.scala-lang:scala-library:jar - org.scala-lang:scala-reflect:jar - org.scala-lang:scalap:jar - org.slf4j:slf4j-api:jar - - - - - - - ${basedir} - - - README* - LICENSE* - NOTICE* - - - - - - ${basedir}/src/main/resources/conf/ - - conf - - **/* - - 0777 - unix - - - - - ${basedir}/src/main/resources/bin/ - - bin - - **/* - - 0777 - unix - - - - - - \ No newline at end of file diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java deleted file mode 100644 index 0c6fc1ec733..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.data.FinishedData; -import org.apache.linkis.cli.application.data.PreparedData; -import org.apache.linkis.cli.application.data.ProcessedData; -import org.apache.linkis.cli.application.interactor.command.LinkisCmdType; -import org.apache.linkis.cli.application.interactor.command.template.UniversalCmdTemplate; -import org.apache.linkis.cli.application.operator.ujes.LinkisOperatorBuilder; -import org.apache.linkis.cli.application.suite.ExecutionSuite; -import org.apache.linkis.cli.application.suite.ExecutionSuiteFactory; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ResultHandler; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.handler.ExceptionHandler; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.PropsException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.exception.handler.CommandExceptionHandler; -import org.apache.linkis.cli.core.exception.handler.DefaultExceptionHandler; -import 
org.apache.linkis.cli.core.interactor.command.CmdTemplateFactory; -import org.apache.linkis.cli.core.interactor.command.fitter.SingleTplFitter; -import org.apache.linkis.cli.core.interactor.command.parser.Parser; -import org.apache.linkis.cli.core.interactor.command.parser.SingleCmdParser; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; -import org.apache.linkis.cli.core.interactor.properties.PropertiesLoader; -import org.apache.linkis.cli.core.interactor.properties.PropsFilesScanner; -import org.apache.linkis.cli.core.interactor.properties.StdPropsLoader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropsFileReader; -import org.apache.linkis.cli.core.interactor.properties.reader.SysEnvReader; -import org.apache.linkis.cli.core.interactor.properties.reader.SysPropsReader; -import org.apache.linkis.cli.core.interactor.result.DefaultResultHandler; -import org.apache.linkis.cli.core.interactor.result.ExecutionResultImpl; -import org.apache.linkis.cli.core.interactor.result.ExecutionStatusEnum; -import org.apache.linkis.cli.core.interactor.validate.ParsedTplValidator; -import org.apache.linkis.cli.core.interactor.var.StdVarAccess; -import org.apache.linkis.cli.core.interactor.var.SysVarAccess; -import org.apache.linkis.cli.core.operator.JobOperatorBuilder; -import org.apache.linkis.cli.core.operator.JobOperatorFactory; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.display.DisplayOperFactory; -import org.apache.linkis.cli.core.present.display.PlainTextFileWriter; -import org.apache.linkis.cli.core.present.display.StdOutWriter; -import org.apache.linkis.cli.core.utils.LogUtils; -import org.apache.linkis.cli.core.utils.SchedulerUtils; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; 
-import org.slf4j.LoggerFactory; - -public class LinkisClientApplication { - private static Logger logger = LoggerFactory.getLogger(LinkisClientApplication.class); - - /** - * generate Templates load env variables TODO: load version info - * - * @return PreparedData - */ - private static PreparedData prepare() throws LinkisClientRuntimeException { - /* - generate template - */ - CmdTemplate template = new UniversalCmdTemplate(); - CmdTemplateFactory.register(template); - /* - load env variables - */ - Map propertiesMap = new HashMap<>(); - PropertiesLoader loader = - new StdPropsLoader() - .addPropertiesReader(new SysPropsReader()) - .addPropertiesReader(new SysEnvReader()); - for (ClientProperties properties : loader.loadProperties()) { - propertiesMap.put(properties.getPropsId(), properties); - } - - return new PreparedData(propertiesMap); - } - - /** - * parse user input load user config load default config check if all inputs are ok - * - * @param args user input arguments - * @return ProcessedData - */ - private static ProcessedData processInput(String[] args, PreparedData preparedData) - throws Exception { - - if (preparedData == null) { - return null; - } - - /* - user input - */ - CmdTemplate template = CmdTemplateFactory.getTemplateCopy(LinkisCmdType.UNIVERSAL); - Parser parser = - new SingleCmdParser() - .setMapper(null) - .setTemplate(template) - .setFitter(new SingleTplFitter()); - - ParseResult result = parser.parse(args); - - ParsedTplValidator parsedTplValidator = new ParsedTplValidator(); - parsedTplValidator.doValidation(result.getParsedTemplate()); - - Params params = result.getParams(); - logger.debug("==========params============\n" + Utils.GSON.toJson(params)); - - /* - VarAccess for sys_prop, sys_env - */ - Map propertiesMap = preparedData.getPropertiesMap(); - VarAccess sysVarAccess = - new SysVarAccess() - .setSysProp(propertiesMap.get(CommonConstants.SYSTEM_PROPERTIES_IDENTIFIER)) - 
.setSysEnv(propertiesMap.get(CommonConstants.SYSTEM_ENV_IDENTIFIER)); - logger.debug("==========sys_var============\n" + Utils.GSON.toJson(sysVarAccess)); - - LogUtils.getInformationLogger() - .info( - "LogFile path: " - + sysVarAccess.getVar(String.class, AppKeys.LOG_PATH_KEY) - + "/" - + sysVarAccess.getVar(String.class, AppKeys.LOG_FILE_KEY)); - /* - default config, -Dconf.root & -Dconf.file specifies config path - */ - // scan config files given root path - String configPath = sysVarAccess.getVar(String.class, AppKeys.CLIENT_CONFIG_ROOT_KEY); - String defaultConfFileName = - sysVarAccess.getVarOrDefault( - String.class, AppKeys.DEFAULT_CONFIG_FILE_NAME_KEY, AppConstants.DEFAULT_CONFIG_NAME); - if (StringUtils.isBlank(configPath)) { - throw new PropsException( - "PRP0007", - ErrorLevel.ERROR, - CommonErrMsg.PropsLoaderErr, - "configuration root path specified by env variable: " - + AppKeys.CLIENT_CONFIG_ROOT_KEY - + " is empty."); - } - - List readersList = - new PropsFilesScanner().getPropsReaders(configPath); // +1 user config - /* - user defined config - */ - String userConfPath = null; - if (params.containsParam(AppKeys.LINKIS_CLIENT_USER_CONFIG)) { - userConfPath = - (String) params.getParamItemMap().get(AppKeys.LINKIS_CLIENT_USER_CONFIG).getValue(); - } - if (StringUtils.isNotBlank(userConfPath)) { - PropertiesReader reader = - new PropsFileReader() - .setPropsId(AppKeys.LINKIS_CLIENT_USER_CONFIG) - .setPropsPath(userConfPath); - readersList.add(reader); - } else { - LogUtils.getInformationLogger() - .info("User does not provide usr-configuration file. 
Will use default config"); - } - /* - load properties - */ - PropertiesLoader loader = - new StdPropsLoader() - .addPropertiesReaders(readersList.toArray(new PropertiesReader[readersList.size()])); - ClientProperties[] loaderResult = loader.loadProperties(); - for (ClientProperties properties : loaderResult) { - if (StringUtils.equals(properties.getPropsId(), AppKeys.LINKIS_CLIENT_USER_CONFIG)) { - for (Map.Entry prop : properties.entrySet()) { - if (StringUtils.startsWith( - (String) prop.getKey(), AppKeys.LINKIS_CLIENT_NONCUSTOMIZABLE)) { - throw new PropsException( - "PRP0007", - ErrorLevel.ERROR, - CommonErrMsg.PropsLoaderErr, - "User cannot specify non-customizable configuration: " + prop.getKey()); - } - } - } - propertiesMap.put(properties.getPropsId(), properties); - } - - /* - VarAccess for cmd, config - */ - VarAccess stdVarAccess = - new StdVarAccess() - .setCmdParams(params) - .setUserConf(propertiesMap.get(AppKeys.LINKIS_CLIENT_USER_CONFIG)) - .setDefaultConf(propertiesMap.get(defaultConfFileName)) - .init(); - logger.info("==========std_var============\n" + Utils.GSON.toJson(stdVarAccess)); - - /* - Prepare operator for accessing linkis - */ - JobOperatorBuilder builder = - new LinkisOperatorBuilder().setStdVarAccess(stdVarAccess).setSysVarAccess(sysVarAccess); - - JobOperatorFactory.register(AppKeys.REUSABLE_UJES_CLIENT, builder); - /* - Prepare DisplayOperator - */ - DisplayOperFactory.register(PresentModeImpl.STDOUT, new StdOutWriter()); - DisplayOperFactory.register(PresentModeImpl.TEXT_FILE, new PlainTextFileWriter()); - - return new ProcessedData( - AppConstants.DUMMY_CID, params.getCmdType(), stdVarAccess, sysVarAccess); - } - - /** - * submit job display result - * - * @return FinishedData - */ - private static FinishedData exec(ProcessedData data) throws Exception { - if (data == null) { - return null; - } - - ExecutionSuite suite = - ExecutionSuiteFactory.getSuite( - data.getCmdType(), data.getStdVarAccess(), data.getSysVarAccess()); - - /* 
- Get everything - */ - Map jobs = suite.getJobs(); - ResultHandler[] resultHandlers = suite.getResultHandlers(); - Execution execution = suite.getExecution(); - - /* - execute - */ - final Map jobsToKill = jobs; - Thread hook = new Thread(() -> execution.terminate(jobsToKill)); - if (jobsToKill != null && jobsToKill.size() != 0) { - Runtime.getRuntime().addShutdownHook(hook); - } - ExecutionResult result = execution.execute(jobs); - - Runtime.getRuntime().removeShutdownHook(hook); - - return new FinishedData(result, resultHandlers); - } - - public static void main(String[] args) { - - ExceptionHandler handler = new DefaultExceptionHandler(); - ProcessedData processedData = null; - FinishedData finishedData = null; - ExecutionResult executionResult = new ExecutionResultImpl(null, ExecutionStatusEnum.UNDEFINED); - PreparedData preparedData = null; - - try { - preparedData = prepare(); - } catch (Exception e) { - handler.handle(e); - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - } - - try { - processedData = processInput(args, preparedData); - } catch (CommandException ce) { - new CommandExceptionHandler().handle(ce); - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - } catch (Exception e) { - handler.handle(e); - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - } - - try { - finishedData = exec(processedData); - } catch (Exception e) { - handler.handle(e); - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - } - - if (finishedData != null) { - executionResult = finishedData.getExecutionResult(); - if (executionResult == null) { - executionResult = new ExecutionResultImpl(null, ExecutionStatusEnum.UNDEFINED); - } - if (executionResult.getException() != null) { - handler.handle(executionResult.getException()); - new DefaultResultHandler().process(executionResult); - } else { - if (finishedData.getResultHandlers() != null) { - for (ResultHandler resultHandler : finishedData.getResultHandlers()) { - if 
(resultHandler != null) { - resultHandler.process(executionResult); - } - } - } - } - } else { - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - new DefaultResultHandler().process(executionResult); - } - - SchedulerUtils.shutDown(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppConstants.java deleted file mode 100644 index 711fc2d1560..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppConstants.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.constants; - -public class AppConstants { - - public static final String DEFAULT_CONFIG_NAME = "linkis-cli.properties"; - public static final Long JOB_QUERY_SLEEP_MILLS = 2000l; - public static final String RESULTSET_LOGO = "============ RESULT SET {0} ============"; - - public static final String RESULTSET_META_BEGIN_LOGO = "----------- META DATA ------------"; - public static final String RESULTSET_META_END_LOGO = "------------ END OF META DATA ------------"; - - public static final String RESULTSET_SEPARATOR_LOGO = "------------------------"; - - public static final Integer RESULTSET_PAGE_SIZE = 5000; - - public static final String JOB_CREATOR_DEFAULT = "LINKISCLI"; - - public static final String JOB_CREATOR_ASYNC_DEFAULT = "LINKISCLIASYNC"; - - public static final String DUMMY_CID = "dummy"; - - public static final String LINKIS_CLI = "LinkisCli"; - - public static final String UJES_MODE = "ujes"; - - public static final String ONCE_MODE = "once"; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppKeys.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppKeys.java deleted file mode 100644 index 7a38d9191f8..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppKeys.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.constants; - -public class AppKeys { - - /** User Not configurable */ - public static final String ADMIN_USERS = "hadoop,root,shangda"; - - public static final String LINKIS_CLIENT_NONCUSTOMIZABLE = "wds.linkis.client.noncustomizable"; - public static final String LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_USER_SPECIFICATION = - LINKIS_CLIENT_NONCUSTOMIZABLE - + ".enable.user.specification"; // allow user to specify submit user - public static final String LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_PROXY_USER = - LINKIS_CLIENT_NONCUSTOMIZABLE + ".enable.proxy.user"; // allow user to specify proxy user - - /** In env */ - public static final String LOG_PATH_KEY = "log.path"; - - public static final String LOG_FILE_KEY = "log.file"; - - public static final String CLIENT_CONFIG_ROOT_KEY = "conf.root"; - public static final String DEFAULT_CONFIG_FILE_NAME_KEY = "conf.file"; - public static final String LINUX_USER_KEY = "user.name"; - - /** Configurable */ - /* - execution type - */ - public static final String JOB_EXEC = "wds.linkis.client.exec"; - - public static final String JOB_EXEC_CODE = JOB_EXEC + ".code"; - - /* - jobContent type - */ - public static final String JOB_CONTENT = "wds.linkis.client.jobContent"; - - /* - source - */ - public static final String JOB_SOURCE = "wds.linkis.client.source"; - public static final String JOB_SOURCE_SCRIPT_PATH = - JOB_SOURCE + "." + LinkisKeys.KEY_SCRIPT_PATH; // corresponds to server api. 
- - /* - params - */ - public static final String JOB_PARAM_CONF = "wds.linkis.client.param.conf"; - public static final String JOB_PARAM_RUNTIME = "wds.linkis.client.param.runtime"; - public static final String JOB_PARAM_VAR = "wds.linkis.client.param.var"; - - /* - labels - */ - public static final String JOB_LABEL = "wds.linkis.client.label"; - public static final String JOB_LABEL_ENGINE_TYPE = - JOB_LABEL + "." + LinkisKeys.KEY_ENGINETYPE; // corresponds to server api. - public static final String JOB_LABEL_CODE_TYPE = - JOB_LABEL + "." + LinkisKeys.KEY_CODETYPE; // corresponds to server api. - public static final String JOB_LABEL_EXECUTEONCE = - JOB_LABEL + "." + LinkisKeys.KEY_EXECUTEONCE; // corresponds to server api. - - /* - Job command - */ - public static final String LINKIS_CLIENT_JOB = "wds.linkis.client.job"; - public static final String LINKIS_CLIENT_JOB_TYPE = "wds.linkis.client.job.type"; - public static final String LINKIS_CLIENT_JOB_ID = "wds.linkis.client.job.id"; - - /* - common - */ - public static final String LINKIS_CLIENT_COMMON = "wds.linkis.client.common"; - public static final String LINKIS_CLIENT_KILL_OPT = LINKIS_CLIENT_COMMON + ".kill"; - public static final String LINKIS_CLIENT_STATUS_OPT = LINKIS_CLIENT_COMMON + ".status"; - public static final String LINKIS_CLIENT_ASYNC_OPT = LINKIS_CLIENT_COMMON + ".async.submit"; - public static final String LINKIS_CLIENT_HELP_OPT = LINKIS_CLIENT_COMMON + ".help"; - public static final String LINKIS_CLIENT_DESC_OPT = LINKIS_CLIENT_COMMON + ".desc"; - public static final String LINKIS_CLIENT_LOG_OPT = LINKIS_CLIENT_COMMON + ".log"; - public static final String LINKIS_CLIENT_RESULT_OPT = LINKIS_CLIENT_COMMON + ".result"; - public static final String LINKIS_CLIENT_LIST_OPT = LINKIS_CLIENT_COMMON + ".list"; - public static final String LINKIS_CLIENT_MODE_OPT = LINKIS_CLIENT_COMMON + ".mode"; - public static final String LINKIS_CLIENT_USER_CONFIG = LINKIS_CLIENT_COMMON + ".user.conf"; - public static 
final String LINKIS_CLIENT_DEFAULT_CONFIG = LINKIS_CLIENT_COMMON + ".default.conf"; - public static final String LINKIS_COMMON_GATEWAY_URL = LINKIS_CLIENT_COMMON + ".gatewayUrl"; - public static final String LINKIS_COMMON_DIAPLAY_META_LOGO = - LINKIS_CLIENT_COMMON + ".display.meta.log"; - public static final String LINKIS_COMMON_LOG_FROMLINE = LINKIS_CLIENT_COMMON + ".fromline"; - public static final String LINKIS_COMMON_RESULT_FROMPAGE = LINKIS_CLIENT_COMMON + ".frompage"; - public static final String LINKIS_COMMON_RESULT_FROMIDX = LINKIS_CLIENT_COMMON + ".fromidx"; - public static final String LINKIS_COMMON_RESULTPATHS = LINKIS_CLIENT_COMMON + ".resultpaths"; - public static final String JOB_EXTRA_ARGUMENTS = - LINKIS_CLIENT_COMMON + "." + LinkisKeys.EXTRA_ARGUMENTS; - public static final String JOB_COMMON_CODE_PATH = LINKIS_CLIENT_COMMON + ".code.path"; - - // all static token , default static - public static final String LINKIS_CLIENT_COMMON_OUTPUT_PATH = - LINKIS_CLIENT_COMMON + ".output.path"; - - public static final String LINKIS_COMMON_AUTHENTICATION_STRATEGY = - LINKIS_CLIENT_COMMON + ".authStrategy"; - public static final String LINKIS_COMMON_TOKEN_KEY = LINKIS_CLIENT_COMMON + ".tokenKey"; - public static final String LINKIS_COMMON_TOKEN_VALUE = LINKIS_CLIENT_COMMON + ".tokenValue"; - - public static final String JOB_COMMON_SUBMIT_USER = LINKIS_CLIENT_COMMON + ".submitUser"; - public static final String JOB_COMMON_SUBMIT_PASSWORD = LINKIS_CLIENT_COMMON + ".submitPassword"; - public static final String JOB_COMMON_PROXY_USER = LINKIS_CLIENT_COMMON + ".proxyUser"; - public static final String JOB_COMMON_CREATOR = LINKIS_CLIENT_COMMON + ".creator"; - - public static final String UJESCLIENT_COMMON_CONNECTT_TIMEOUT = - LINKIS_CLIENT_COMMON + ".connectionTimeout"; - public static final String UJESCLIENT_COMMON_DISCOVERY_ENABLED = - LINKIS_CLIENT_COMMON + ".discoveryEnabled"; - public static final String UJESCLIENT_COMMON_LOADBALANCER_ENABLED = - 
LINKIS_CLIENT_COMMON + ".loadbalancerEnabled"; - public static final String UJESCLIENT_COMMON_MAX_CONNECTION_SIZE = - LINKIS_CLIENT_COMMON + ".maxConnectionSize"; - public static final String UJESCLIENT_COMMON_RETRY_ENABLED = - LINKIS_CLIENT_COMMON + ".retryEnabled"; - public static final String UJESCLIENT_COMMON_READTIMEOUT = LINKIS_CLIENT_COMMON + ".readTimeout"; - public static final String UJESCLIENT_COMMON_DWS_VERSION = LINKIS_CLIENT_COMMON + ".dwsVersion"; - - public static final String LINKIS_CLIENT_COMMON_RESULT_SET_PAGE_SIZE = - LINKIS_CLIENT_COMMON + ".resultset.page.size"; - - public static final String REUSABLE_UJES_CLIENT = "reusable.ujes.client"; - public static final String REUSABLE_ONCEJOB_CLIENT = "reusable.onceJob.client"; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/FinishedData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/FinishedData.java deleted file mode 100644 index 4438cf301ba..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/FinishedData.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.data; - -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ResultHandler; - -public class FinishedData { - ExecutionResult executionResult; - ResultHandler[] resultHandlers; - - public FinishedData(ExecutionResult executionResult, ResultHandler[] resultHandlers) { - this.executionResult = executionResult; - this.resultHandlers = resultHandlers; - } - - public ExecutionResult getExecutionResult() { - return executionResult; - } - - public void setExecutionResult(ExecutionResult executionResult) { - this.executionResult = executionResult; - } - - public ResultHandler[] getResultHandlers() { - return resultHandlers; - } - - public void setResultHandlers(ResultHandler[] resultHandlers) { - this.resultHandlers = resultHandlers; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/PreparedData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/PreparedData.java deleted file mode 100644 index bdde39e23e7..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/PreparedData.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.data; - -import org.apache.linkis.cli.common.entity.properties.ClientProperties; - -import java.util.Map; - -public class PreparedData { - Map propertiesMap; - - public PreparedData(Map propertiesMap) { - this.propertiesMap = propertiesMap; - } - - public Map getPropertiesMap() { - return propertiesMap; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/ProcessedData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/ProcessedData.java deleted file mode 100644 index a69b7c4b63b..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/ProcessedData.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.data; - -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.entity.var.VarAccess; - -public class ProcessedData { - String cid; - CmdType cmdType; - VarAccess stdVarAccess; - VarAccess sysVarAccess; - - public ProcessedData( - String cid, CmdType cmdType, VarAccess stdVarAccess, VarAccess sysVarAccess) { - this.cid = cid; - this.cmdType = cmdType; - this.stdVarAccess = stdVarAccess; - this.sysVarAccess = sysVarAccess; - } - - public String getCid() { - return cid; - } - - public CmdType getCmdType() { - return cmdType; - } - - public VarAccess getStdVarAccess() { - return stdVarAccess; - } - - public VarAccess getSysVarAccess() { - return sysVarAccess; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdType.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdType.java deleted file mode 100644 index f03c3e779b5..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdType.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.command; - -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.core.constants.CommonConstants; - -public enum LinkisCmdType implements CmdType { - UNIVERSAL(CommonConstants.UNIVERSAL_SUBCMD, 1, CommonConstants.UNIVERSAL_SUBCMD_DESC); - - private int id; - private String name; - private String desc; - - LinkisCmdType(String name, int id) { - this.id = id; - this.name = name; - this.desc = null; - } - - LinkisCmdType(String name, int id, String desc) { - this.id = id; - this.name = name; - this.desc = desc; - } - - @Override - public int getId() { - return this.id; - } - - @Override - public String getName() { - return this.name; - } - - @Override - public String getDesc() { - return this.desc; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java deleted file mode 100644 index 
e238038aea5..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java +++ /dev/null @@ -1,378 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.command.template; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.interactor.command.LinkisCmdType; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.template.AbstractCmdTemplate; -import org.apache.linkis.cli.core.interactor.command.template.option.*; - -import org.apache.commons.lang3.StringUtils; - -import java.io.File; -import java.util.Arrays; - -public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneable { - - protected StdOption gatewayUrl = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_COMMON_GATEWAY_URL, - new String[] {"--gatewayUrl"}, - "specify linkis gateway url", - true, - ""); - protected StdOption authenticatationStrategy = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, - new String[] {"--authStg"}, - "specify linkis authentication strategy", - true, - ""); - protected StdOption authKey = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_COMMON_TOKEN_KEY, - new String[] {"--authKey"}, - "specify linkis authentication key(tokenKey)", - true, - ""); - protected StdOption authValue = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_COMMON_TOKEN_VALUE, - new String[] {"--authVal"}, - "specify linkis authentication value(tokenValue)", - true, - ""); - protected StdOption userConfigPath = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_USER_CONFIG, - new String[] {"--userConf"}, - "specify user configuration file path(absolute)", - true, - ""); - protected StdOption killOpt = - option( - 
AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_KILL_OPT, - new String[] {"--kill"}, - "specify linkis taskId for job to be killed", - true, - ""); - protected StdOption logOpt = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_LOG_OPT, - new String[] {"--log"}, - "specify linkis taskId for querying job status", - true, - ""); - protected StdOption resultOpt = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_RESULT_OPT, - new String[] {"--result"}, - "specify linkis taskId for querying job status", - true, - ""); - protected StdOption statusOpt = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_STATUS_OPT, - new String[] {"--status"}, - "specify linkis taskId for querying job status", - true, - ""); - protected StdOption asyncOpt = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_ASYNC_OPT, - new String[] {"--async"}, - "specify linkis taskId for querying job status", - true, - false); - protected StdOption modeOpt = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_MODE_OPT, - new String[] {"--mode"}, - "specify linkis execution mode: " - + AppConstants.UJES_MODE - + "/" - + AppConstants.ONCE_MODE - + ".", - true, - AppConstants.UJES_MODE); - protected Flag helpOpt = - flag( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_HELP_OPT, - new String[] {"--help"}, - "specify linkis taskId for querying job status", - true, - false); - - protected StdOption engineTypeOP = - option( - AppKeys.JOB_LABEL, - AppKeys.JOB_LABEL_ENGINE_TYPE, - new String[] {"-engineType"}, - "specify linkis engineType for this job", - true, - ""); - - protected StdOption codeTypeOp = - option( - AppKeys.JOB_LABEL, - AppKeys.JOB_LABEL_CODE_TYPE, - new String[] {"-codeType"}, - "specify linkis runType for this job", - true, - ""); - protected StdOption codePathOp = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.JOB_COMMON_CODE_PATH, - new String[] {"-codePath"}, - "specify file path that contains 
code you want to execute", - true, - ""); - - protected StdOption codeOp = - option( - AppKeys.JOB_EXEC, - AppKeys.JOB_EXEC_CODE, - new String[] {"-code"}, - "specify code that you want to execute", - true, - ""); - - protected StdOption scriptPathOp = - option( - AppKeys.JOB_SOURCE, - AppKeys.JOB_SOURCE_SCRIPT_PATH, - new String[] {"-scriptPath"}, - "specify remote path for your uploaded script", - true, - ""); - - protected StdOption submitUser = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.JOB_COMMON_SUBMIT_USER, - new String[] {"-submitUser"}, - "specify submit user for this job", - true, - ""); - - protected StdOption proxyUser = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.JOB_COMMON_PROXY_USER, - new String[] {"-proxyUser"}, - "specify proxy user who executes your code in Linkis server-side", - true, - ""); - - protected StdOption creatorOp = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.JOB_COMMON_CREATOR, - new String[] {"-creator"}, - "specify creator for this job", - true, - ""); - - protected StdOption outPathOp = - option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH, - new String[] {"-outPath"}, - "specify output path for resultSet. If not specified, then output reset to screen(stdout)", - true, - ""); - - protected MapOption confMapOp = - mapOption( - AppKeys.JOB_PARAM_CONF, - AppKeys.JOB_PARAM_CONF, - new String[] {"-confMap"}, - "specify configurationMap(startupMap) for your job. You can put any start-up parameters into this Map(e.g. spark.executor.instances). Input format: -confMap key1=value1 -confMap key2=value2", - true); - - protected MapOption runtimeMapOp = - mapOption( - AppKeys.JOB_PARAM_RUNTIME, - AppKeys.JOB_PARAM_RUNTIME, - new String[] {"-runtimeMap"}, - "specify runtimeMap for your job. You can put any start-up parameters into this Map(e.g. spark.executor.instances). 
Input format: -runtimeMap key1=value1 -runtimeMap key2=value2", - true); - - protected SpecialMapOption varMapOp = - speciaMapOption( - AppKeys.JOB_PARAM_VAR, - AppKeys.JOB_PARAM_VAR, - new String[] {"-varMap"}, - "specify variables map. Variables is for key-word substitution. Use \'${key}\' to specify key-word. Input substitution rule as follow: -varMap key1=value1 -varMap key2=value2", - true); - - protected MapOption labelMapOp = - mapOption( - AppKeys.JOB_LABEL, - AppKeys.JOB_LABEL, - new String[] {"-labelMap"}, - "specify label map. You can put any Linkis into this Map. Input format: -labelMap labelName1=labelValue1 -labelMap labelName2=labelValue2", - true); - - protected MapOption sourceMapOp = - mapOption( - AppKeys.JOB_SOURCE, - AppKeys.JOB_SOURCE, - new String[] {"-sourceMap"}, - "specify source map. Input format: -sourceMap key1=value1 -sourceMap key2=value2", - true); - - protected MapOption jobContentMapOp = - mapOption( - AppKeys.JOB_CONTENT, - AppKeys.JOB_CONTENT, - new String[] {"-jobContentMap"}, - "specify jobContent map. 
Input format: -jobContentMap key1=value1 -jobContentMap key2=value2", - true); - - protected Parameter argumentsParas = - parameter( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.JOB_EXTRA_ARGUMENTS, - "arguments", - "specify arguments if exist any", - true, - new String[] {""}); - - public UniversalCmdTemplate() { - super(LinkisCmdType.UNIVERSAL); - } - - @Override - public void checkParams() throws CommandException { - int cnt = 0; - if (statusOpt.hasVal()) { - cnt++; - } - if (killOpt.hasVal()) { - cnt++; - } - if (logOpt.hasVal()) { - cnt++; - } - if (resultOpt.hasVal()) { - cnt++; - } - if (helpOpt.hasVal()) { - cnt++; - } - if (cnt > 1) { - throw new ValidateException( - "VLD0001", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Can only specify 1 of: " - + statusOpt.getParamName() - + "/" - + killOpt.getParamName() - + "/" - + helpOpt.getParamName() - + "/"); - } else if (cnt == 0) { - int cnt2 = 0; - if (argumentsParas.hasVal()) { - if (!(argumentsParas.getValue() instanceof String[]) - || argumentsParas.getValue().length == 0) { - throw new ValidateException( - "VLD0001", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - argumentsParas.getParamName() - + "has raw-value but failed to convert it into String-array. Raw-value: " - + argumentsParas.getRawVal()); - } - String firstPara = argumentsParas.getValue()[0]; - if (StringUtils.startsWith(firstPara, "-")) { - throw new CommandException( - "CMD0011", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - this.cmdType, - "Illegal argument: " + Arrays.toString(argumentsParas.getValue())); - } - File file = new File(firstPara); - if (!file.exists() || !file.isFile()) { - throw new ValidateException( - "VLD0001", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Argument: \'" - + firstPara - + "\' is not a linkis-cli option. 
Assume it's script file, but no file named \'" - + firstPara - + "\' is found"); - } - cnt2++; - } - if (codeOp.hasVal()) { - cnt2++; - } - if (codePathOp.hasVal()) { - cnt2++; - } - if (!modeOpt.hasVal() - || StringUtils.equalsIgnoreCase(modeOpt.getValue(), AppConstants.UJES_MODE)) { - if (cnt2 > 1) { - throw new ValidateException( - "VLD0001", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Can only specify at most one of linkis-cli option: \''" - + codeOp.getParamName() - + "\' or \'" - + codePathOp.getParamName() - + "\' or \'script-path and script-arguments\'"); - } - if (cnt2 == 0) { - throw new ValidateException( - "VLD0001", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Need to specify at least one of linkis-cli option: \'" - + codeOp.getParamName() - + "\' or \'" - + codePathOp.getParamName() - + "\' or \'script-path and script-arguments\'."); - } - } - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJob.java deleted file mode 100644 index a32521c20e3..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJob.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.core.interactor.job.AbstractJob; - -public abstract class LinkisJob extends AbstractJob { - @Override - public abstract LinkisJobData getJobData(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisManageJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisManageJob.java deleted file mode 100644 index 7dbcaca88dd..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisManageJob.java +++ /dev/null @@ -1,522 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisLogData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultData; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisJobManDesc; -import org.apache.linkis.cli.application.interactor.job.subtype.LinkisManSubType; -import org.apache.linkis.cli.application.operator.ujes.LinkisJobOperator; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.job.JobData; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.*; -import org.apache.linkis.cli.core.utils.CommonUtils; -import org.apache.linkis.cli.core.utils.LogUtils; -import org.apache.linkis.cli.core.utils.SchedulerUtils; - -import org.apache.commons.lang3.StringUtils; - -import java.text.MessageFormat; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisManageJob extends LinkisJob - implements ManagableBackendJob, 
TerminatableJob, LogAccessibleJob, ResultAccessibleJob { - private static final Logger logger = LoggerFactory.getLogger(LinkisManageJob.class); - - private LinkisJobManDesc jobDesc; - private LinkisJobData data; - private TerminateToken terminateToken = new TerminateToken(); - - @Override - public LinkisJobOperator getJobOperator() { - if (!(super.getJobOperator() instanceof LinkisJobOperator)) { - throw new LinkisClientExecutionException( - "EXE0003", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "JobOperator of LinkisManageJob should be instance of LinkisJobOperator"); - } - return (LinkisJobOperator) super.getJobOperator(); - } - - @Override - public void setOperator(JobOperator operator) { - if (!(operator instanceof LinkisJobOperator)) { - throw new LinkisClientExecutionException( - "EXE0003", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "JobOperator of LinkisManageJob should be instance of LinkisJobOperator"); - } - super.setOperator(operator); - } - - @Override - public LinkisJobManDesc getJobDesc() { - return jobDesc; - } - - public void setJobDesc(LinkisJobManDesc jobDesc) { - this.jobDesc = jobDesc; - } - - @Override - public LinkisJobData getJobData() { - return data; - } - - public void setJobData(LinkisJobData data) { - this.data = data; - } - - @Override - public boolean isSuccess() { - return data.isSuccess(); - } - - @Override - public void doManage() throws LinkisClientRuntimeException { - LinkisManSubType subType = (LinkisManSubType) getSubType(); - if (!(subType instanceof LinkisManSubType)) { - throw new LinkisClientExecutionException( - "EXE0030", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "JobSubType is not instance of JobManSubType"); - } - switch (subType) { - case STATUS: - try { - data.updateByOperResult( - getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - if (data.getJobStatus() != null) { - data.setSuccess(true); - } - } catch (Exception e) { - data.setSuccess(false); - 
data.setException(e); - } - break; - // case JOB_DESC: - // result = jobManagableBackendExecutor.queryJobDesc(job); - // break; - case LOG: - try { - ((LinkisLogData) data).setIncLogMode(false); - startRetrieveLog(); - waitLogFin(); - data.setSuccess(true); - } catch (Exception e) { - data.setSuccess(false); - data.setException(e); - } - break; - case RESULT: - try { - startRetrieveResult(); - data.setSuccess(true); - } catch (LinkisClientExecutionException e) { - if (e.getCode().equals("EXE0037")) { - ((LinkisResultData) data).sendResultFin(); // inform listener to stop - data.setSuccess(true); - } else { - data.setSuccess(false); - data.setException(e); - } - LogUtils.getInformationLogger().warn(e.getMessage()); - } catch (Exception e) { - data.setSuccess(false); - data.setException(e); - LogUtils.getInformationLogger().warn(e.getMessage()); - } - break; - // case LIST: - // resultData = jobManExecutor.queryJobList(job); - // break; - case KILL: - doKill(); - break; - default: - throw new LinkisClientExecutionException( - "EXE0002", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "JobSubType + \"" + subType + "\" is not supported"); - } - } - - @Override - public void startRetrieveLog() { - if (jobDesc.getUser() == null || jobDesc.getJobID() == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - data.updateByOperResult(getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - startRetrieveLogInternal(data); - } - - public void waitLogFin() { - if (!(data instanceof LinkisLogData)) { - return; - } - int retry = 0; - int MAX_RETRY = 300; // wait for 10 minutes after job finish - while (retry++ < MAX_RETRY) { - if (((LinkisLogData) data).logFinReceived()) { - return; - } - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - } - } - - public void startRetrieveLogInternal(JobData jobData) { - if (!(jobData instanceof LinkisLogData)) { - throw 
new LinkisClientExecutionException( - "EXE0034", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "JobData is not LinkisLogData"); - } - if (jobData.getUser() == null || jobData.getJobID() == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - LinkisLogData logData = (LinkisLogData) jobData; - if (logData.getJobStatus() != null) { - try { - Thread logConsumer = new Thread(() -> logData.notifyLogListener(), "Log-Consumer"); - Thread logRetriever = new Thread(() -> queryLogLoop(logData), "Log-Retriever"); - SchedulerUtils.getCachedThreadPoolExecutor().execute(logRetriever); - SchedulerUtils.getCachedThreadPoolExecutor().execute(logConsumer); - } catch (Exception e) { - logger.warn("Failed to retrieve log", e); - } - } - } - - public void queryLogLoop(LinkisLogData data) { - int curLogIdx; - int nextLogIdx; - boolean hasNext = true; - int retryCnt = 0; - final int MAX_RETRY = 12; // continues fails for 90s, then exit thread - try { - while (hasNext) { - curLogIdx = data.getNextLogLineIdx() == null ? 0 : data.getNextLogLineIdx(); - try { - data.updateByOperResult(getJobOperator().queryJobInfo(data.getUser(), data.getJobID())); - queryJobLogFromLine(data, curLogIdx); - } catch (Exception e) { - logger.error("Cannot get inc-log:", e); - // and yes sometimes server may not be able to prepare persisted-log - retryCnt++; - if (retryCnt >= MAX_RETRY) { - logger.error( - "Continuously failing to query inc-log for " - + MAX_RETRY * (MAX_RETRY + 2) * 500 / 1000 - + "s. Will no longer try to query log", - e); - break; - } - Utils.doSleepQuietly(500l + 500l * retryCnt); // maybe server problem. sleep longer - continue; - } - retryCnt = 0; - nextLogIdx = data.getNextLogLineIdx() == null ? curLogIdx : data.getNextLogLineIdx(); - if (data.isIncLogMode()) { - hasNext = data.hasNextLogLine() == null ? 
curLogIdx < nextLogIdx : data.hasNextLogLine(); - } else { - hasNext = curLogIdx < nextLogIdx; - } - if (curLogIdx >= nextLogIdx) { - String msg = - MessageFormat.format( - "Job is still running, status={0}, progress={1}", - data.getJobStatus(), String.valueOf(data.getJobProgress() * 100) + "%"); - logger.info(msg); - } - Utils.doSleepQuietly(AppConstants.JOB_QUERY_SLEEP_MILLS); - } - } catch (Exception e) { - logger.error("Something goes wrong. Job Log may be incomplete", e); - } finally { - data.sendLogFin(); - } - } - - private void queryJobLogFromLine(LinkisLogData data, int fromLine) - throws LinkisClientRuntimeException { - if (!data.getJobStatus().isJobFinishedState()) { - try { - data.updateByOperResult( - getJobOperator() - .queryRunTimeLogFromLine( - data.getUser(), data.getJobID(), data.getExecID(), fromLine)); - } catch (Exception e) { - // job is finished while we start query log(but request is not send). - // then probably server cache is gone and we got a exception here. - // however we cannot know if this happens based on the exception message - logger.warn( - "Caught exception when querying runtime-log. Probably server-side has close stream. 
Will try openLog api if Job is completed.", - e); - if (data.getJobStatus().isJobFinishedState()) { - CommonUtils.doSleepQuietly(500l); - data.updateByOperResult( - getJobOperator() - .queryPersistedLogFromLine( - data.getUser(), data.getJobID(), data.getExecID(), fromLine)); - } - } - } else { - try { - data.updateByOperResult( - getJobOperator() - .queryPersistedLogFromLine( - data.getLogPath(), data.getUser(), data.getJobID(), fromLine)); - } catch (Exception e) { - logger.error("Cannot get persisted-inc-log:", e); - // and yes sometimes server may not be able to prepare persisted-log - throw e; - } - } - } - - @Override - public void startRetrieveResult() { - if (!(data instanceof LinkisResultData)) { - throw new LinkisClientExecutionException( - "EXE0034", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "JobData is not LinkisResultData"); - } - if (jobDesc.getUser() == null || jobDesc.getJobID() == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - data.updateByOperResult(getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - if (data.getJobStatus() == null) { - throw new LinkisClientExecutionException( - "EXE0038", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "jobStatus is null"); - } - LinkisResultData resultData = (LinkisResultData) data; - if (!resultData.getJobStatus().isJobSuccess() - || StringUtils.isBlank(resultData.getResultLocation())) { - resultData.updateByOperResult( - getJobOperator().queryJobInfo(resultData.getUser(), resultData.getJobID())); - } - if (!resultData.getJobStatus().isJobSuccess()) { - // throw new LinkisClientExecutionException("EXE0035", ErrorLevel.ERROR, - // CommonErrMsg.ExecutionErr, "Job status is not success but \'" + - // resultData.getJobStatus() + "\'. Will not try to retrieve any Result"); - LogUtils.getInformationLogger() - .info( - "Job status is not success but \'" - + resultData.getJobStatus() - + "\'. 
Will not try to retrieve any Result"); - resultData.sendResultFin(); // inform listener to stop - return; - } - if (StringUtils.isBlank(resultData.getResultLocation())) { - throw new LinkisClientExecutionException( - "EXE0037", - ErrorLevel.WARN, - CommonErrMsg.ExecutionErr, - "Got blank ResultLocation from server. Job may not have result-set. Will not try to retrieve any Result"); - } - - resultData.updateByOperResult( - getJobOperator() - .queryResultSetPaths( - resultData.getUser(), resultData.getJobID(), resultData.getResultLocation())); - if (resultData.getResultSetPaths() == null || resultData.getResultSetPaths().length == 0) { - String msg = "Your job got no result."; - logger.warn(msg); - resultData.sendResultFin(); // inform listener to stop - resultData.setHasResult(false); - return; - } - - try { - resultData.setHasResult(true); - Thread resultRetriever = new Thread(() -> queryResultLoop(resultData), "Result-Retriever"); - SchedulerUtils.getCachedThreadPoolExecutor().execute(resultRetriever); - } catch (Exception e) { - logger.error("Failed to retrieve result", e); - throw e; - } - } - - public void queryResultLoop(LinkisResultData data) { - boolean hasNext = true; - int retryCnt = 0; - final int MAX_RETRY = 30; // continues fails for 250s, then exit - int idx = 0; - try { - while (hasNext) { - try { - hasNext = queryOneResult(data, idx); - } catch (LinkisClientRuntimeException e) { - logger.error("Cannot get result:", e); - retryCnt++; - if (retryCnt >= MAX_RETRY) { - logger.error( - "Continuously failing to query result for " - + MAX_RETRY * (MAX_RETRY + 2) * 500 / 1000 - + "s. Will no longer try to query result", - e); - return; - } else { - hasNext = true; - } - Utils.doSleepQuietly(500l + 500l * retryCnt); // maybe server problem. sleep longer - continue; - } - idx++; - } - } catch (Exception e) { - logger.error("Something goes wrong. 
Job Result may be incomplete", e); - throw e; - } finally { - data.sendResultFin(); - } - } - - private boolean queryOneResult(LinkisResultData data, int idxResultSet) { - Integer curPage = 1; - boolean hasNextResult = true; - boolean hasNextPage = true; - while (hasNextPage) { - data.updateByOperResult( - getJobOperator() - .queryResultSetGivenResultSetPath( - data.getResultSetPaths(), - idxResultSet, - data.getUser(), - curPage, - AppConstants.RESULTSET_PAGE_SIZE)); - if (data.hasNextResultPage() == null) { - throw new LinkisClientExecutionException( - "EXE0040", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionResultErr, - "Something foes wrong. Got null as \'hasNextPage\'."); - } - hasNextPage = data.hasNextResultPage(); - - curPage++; - hasNextResult = idxResultSet + 1 < data.getResultSetPaths().length; - } - return hasNextResult; - } - - public void doKill() { - data.updateByOperResult(getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - if (data.getUser() == null || data.getJobID() == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - if (data.getJobStatus() == null) { - throw new LinkisClientExecutionException( - "EXE0038", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "jobStatus is null"); - } - String msg; - if (data.getJobStatus().isJobCancelled()) { - msg = "Kill job aborted: Job has already been canceled."; - data.setSuccess(false); - data.setMessage(msg); - } else if (data.getJobStatus().isJobFinishedState()) { - msg = "Kill job aborted: Job is already in finished-state(SUCCEED/FAILED)."; - data.setSuccess(false); - data.setMessage(msg); - // throw new LinkisClientExecutionException(JobStatus.FAILED, "EXE0004", - // ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } else { - try { - data.updateByOperResult( - getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - data.updateByOperResult( - getJobOperator().kill(data.getUser(), 
data.getJobID(), data.getExecID())); - } catch (Exception e) { - data.setSuccess(false); - data.setMessage("Exception thrown when trying to send kill request"); - data.setException(e); - } - msg = "Kill request has been sent"; - LogUtils.getPlaintTextLogger().info(msg); - int retryCnt = 0; - final int MAX_RETRY = 5 * 6; - while (!data.getJobStatus().isJobFinishedState() && !data.getJobStatus().isJobCancelled()) { - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - try { - data.updateByOperResult( - getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - retryCnt = 0; // if exception then will not go here - } catch (Exception e) { - retryCnt++; - CommonUtils.doSleepQuietly(5 * CommonConstants.JOB_QUERY_SLEEP_MILLS); - if (retryCnt >= MAX_RETRY) { - data.setSuccess(false); - data.setMessage( - MessageFormat.format( - "After send kill. Client cannot get jobStatus from server continuously for {0} seconds. Client aborted. Assume kill failed! Error message: \n", - MAX_RETRY * 5 * CommonConstants.JOB_QUERY_SLEEP_MILLS)); - data.setException(e); - return; - } - } - } - if (data.getJobStatus().isJobFinishedState() && !data.getJobStatus().isJobCancelled()) { - msg = "Kill Failed: Job Current status: " + data.getJobStatus(); - data.setSuccess(false); - data.setMessage(msg); - // throw new LinkisClientExecutionException(JobStatus.FAILED, - // "EXE0004", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } else if (data.getJobStatus().isJobCancelled()) { - msg = - MessageFormat.format( - "Kill successful: jobId={0}, status={1}.", data.getJobID(), data.getJobStatus()); - data.setSuccess(true); - data.setMessage(msg); - // LogUtils.getPlaintTextLogger().info(msg); - } - } - return; - } - - @Override - public TerminateToken getTerminateToken() { - return terminateToken; - } - - public void setTerminateToken(TerminateToken terminateToken) { - this.terminateToken = terminateToken; - } - - @Override - public void terminate() throws 
LinkisClientRuntimeException { - return; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisOnceJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisOnceJob.java deleted file mode 100644 index 505b08cf26f..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisOnceJob.java +++ /dev/null @@ -1,308 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisLogData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisOnceJobData; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisOnceDesc; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.job.JobDescription; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.*; -import org.apache.linkis.cli.core.utils.CommonUtils; -import org.apache.linkis.cli.core.utils.LogUtils; -import org.apache.linkis.cli.core.utils.SchedulerUtils; - -import java.text.MessageFormat; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisOnceJob extends LinkisJob - implements ManagableBackendJob, - LogAccessibleJob, - ResultAccessibleJob, - AsyncBackendJob, - TerminatableJob { - - private static final Logger logger = LoggerFactory.getLogger(LinkisOnceJob.class); - - private LinkisOnceDesc jobDesc; - private LinkisOnceJobData data; - private TerminateToken terminateToken = new TerminateToken(); - private Boolean isAsync = false; - - public void setAsync(Boolean async) { - isAsync = async; - } - - public Boolean isAsync() { - return isAsync; - } - - @Override - public JobDescription getJobDesc() { - return jobDesc; - } - - public void setJobDesc(LinkisOnceDesc desc) { - this.jobDesc = desc; - } - - @Override - 
public TerminateToken getTerminateToken() { - return terminateToken; - } - - @Override - public LinkisJobData getJobData() { - return data; - } - - public void setJobData(LinkisOnceJobData data) { - this.data = data; - } - - @Override - public JobOperator getJobOperator() { - return null; - } - - /** AsyncBackendJob */ - @Override - public void submit() throws LinkisClientRuntimeException { - StringBuilder infoBuilder = new StringBuilder(); - infoBuilder - .append("connecting to linkis gateway:") - .append(data.getOnceJobAdapter().getServerUrl()); - LogUtils.getInformationLogger().info(infoBuilder.toString()); - data.getOnceJobAdapter().submit(); - data.getOnceJobAdapter().updateStatus(); - infoBuilder.setLength(0); - infoBuilder - .append("JobId:") - .append(data.getJobID()) - .append(System.lineSeparator()) - .append("ExecId:") - .append(data.getExecID()); - LogUtils.getPlaintTextLogger().info(infoBuilder.toString()); - if (isAsync) { - data.setSuccess(data.getJobStatus() != null && data.getJobStatus().isJobSubmitted()); - } - } - - @Override - public void updateJobStatus() throws LinkisClientRuntimeException { - if (!data.getJobStatus().isJobFinishedState()) { - data.getOnceJobAdapter().updateStatus(); - String log2 = - "\n---------------------------------------------------\n" - + "\ttask " - + data.getJobID() - + " status is " - + data.getJobStatus() - + ", progress : " - + data.getJobProgress() - + "\n---------------------------------------------------"; - logger.info(log2); - } - } - - @Override - public void waitJobComplete() throws LinkisClientRuntimeException { - data.getOnceJobAdapter().waitForComplete(); - updateJobStatus(); - data.setSuccess(data.getJobStatus() != null && data.getJobStatus().isJobSuccess()); - waitIncLogComplete(data); - } - - /** TerminatableJob */ - @Override - public void terminate() throws LinkisClientRuntimeException { - terminateToken.setTerminate(); - doKill(); - } - - @Override - public void startRetrieveResult() { - // 
TODO:wait for OnceJob to support this feature - data.sendResultFin(); - } - - @Override - public void startRetrieveLog() { - data.setIncLogMode(true); - startRetrieveLogInternal(data); - } - - public void startRetrieveLogInternal(LinkisOnceJobData jobData) { - if (!(jobData instanceof LinkisLogData)) { - throw new LinkisClientExecutionException( - "EXE0034", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "JobData is not LinkisLogData"); - } - if (jobData.getUser() == null || jobData.getJobID() == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - LinkisOnceJobData logData = jobData; - if (logData.getJobStatus() != null) { - try { - Thread logConsumer = new Thread(() -> logData.notifyLogListener(), "Log-Consumer"); - Thread logRetriever = new Thread(() -> queryLogLoop(logData), "Log-Retriever"); - SchedulerUtils.getCachedThreadPoolExecutor().execute(logRetriever); - SchedulerUtils.getCachedThreadPoolExecutor().execute(logConsumer); - } catch (Exception e) { - logger.warn("Failed to retrieve log", e); - } - } - } - - private void queryJobLogOneIteration(LinkisOnceJobData data) throws LinkisClientRuntimeException { - try { - data.getOnceJobAdapter().queryJobLogOneIteration(); - // - // data.updateByOperResult(getJobOperator().queryRunTimeLogFromLine(data.getUser(), - // data.getJobID(), data.getExecID(), fromLine)); - } catch (Exception e) { - // job is finished while we start query log(but request is not send). - // then probably server cache is gone and we got a exception here. - // however we cannot know if this happens based on the exception message - logger.warn( - "Caught exception when querying runtime-log. Probably server-side has close stream. 
Will try openLog api if Job is completed.", - e); - if (data.getJobStatus().isJobFinishedState()) { - CommonUtils.doSleepQuietly(500l); - // - // data.updateByOperResult(getJobOperator().queryPersistedLogFromLine(data.getUser(), - // data.getJobID(), data.getExecID(), fromLine)); - } - } - } - - public void queryLogLoop(LinkisOnceJobData data) { - boolean hasNext = true; - int retryCnt = 0; - final int MAX_RETRY = 12; // continues fails for 90s, then exit thread - try { - while (hasNext) { - try { - queryJobLogOneIteration(data); - } catch (Exception e) { - logger.error("Cannot get inc-log:", e); - // and yes sometimes server may not be able to prepare persisted-log - retryCnt++; - if (retryCnt >= MAX_RETRY) { - logger.error( - "Continuously failing to query inc-log for " - + MAX_RETRY * (MAX_RETRY + 2) * 500 / 1000 - + "s. Will no longer try to query log", - e); - break; - } - Utils.doSleepQuietly(500l + 500l * retryCnt); // maybe server problem. sleep longer - continue; - } - retryCnt = 0; - if (data.isIncLogMode()) { - hasNext = - data.hasNextLogLine() == null - ? !data.getJobStatus().isJobFinishedState() - : data.hasNextLogLine(); - } else { - hasNext = false; - } - if (hasNext) { - String msg = - MessageFormat.format( - "Job is still running, status={0}, progress={1}", - data.getJobStatus(), String.valueOf(data.getJobProgress() * 100) + "%"); - logger.info(msg); - } - Utils.doSleepQuietly(AppConstants.JOB_QUERY_SLEEP_MILLS); - } - } catch (Exception e) { - logger.error("Something goes wrong. 
Job Log may be incomplete", e); - } finally { - data.sendLogFin(); - } - } - - private void waitIncLogComplete(LinkisJobData data) { - if (!(data instanceof LinkisOnceJobData)) { - return; - } - int retry = 0; - int MAX_RETRY = 300; // wait for 10 minutes after job finish - while (retry++ < MAX_RETRY) { - if (((LinkisOnceJobData) data).logFinReceived()) { - return; - } - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - } - String msg = - "Job is in Finished state(SUCCEED/FAILED/CANCELED) but client keep querying inclog for " - + (MAX_RETRY * CommonConstants.JOB_QUERY_SLEEP_MILLS / 1000) - + "seconds. Execution ends forcefully. Next will try handle execution result."; - logger.warn(msg); - LogUtils.getInformationLogger().warn(msg); - } - - // /** - // * LogAccessibleJob - // */ - // @Override - // public void startRetrieveLog() { - - // } - - /** ManagableBackendJob */ - @Override - public void doManage() throws LinkisClientRuntimeException {} - - @Override - public boolean isSuccess() { - return data.isSuccess(); - } - - private void doKill() { - String msg; - if (data.getJobStatus().isJobCancelled()) { - msg = "Kill job aborted: Job is failed or has already been canceled."; - data.setSuccess(false); - data.setMessage(msg); - } else if (data.getJobStatus().isJobFinishedState()) { - msg = "Kill job aborted: Job is already in finished-state(SUCCEED/FAILED)."; - data.setSuccess(false); - data.setMessage(msg); - // throw new LinkisClientExecutionException(JobStatus.FAILED, "EXE0004", - // ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } else { - data.getOnceJobAdapter().kill(); - updateJobStatus(); - data.setSuccess(true); - data.setMessage("successfully killed job"); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisSubmitJob.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisSubmitJob.java deleted file mode 100644 index d1e71e4e8d4..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisSubmitJob.java +++ /dev/null @@ -1,282 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisLogData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultData; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisJobManDesc; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisSubmitDesc; -import org.apache.linkis.cli.application.operator.ujes.LinkisJobOperator; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.*; -import org.apache.linkis.cli.core.utils.CommonUtils; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.exception.ExceptionUtils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisSubmitJob extends LinkisJob - implements AsyncBackendJob, LogAccessibleJob, ResultAccessibleJob, TerminatableJob { - private static final Logger logger = LoggerFactory.getLogger(LinkisSubmitJob.class); - - private LinkisSubmitDesc jobDesc; - private LinkisJobData data; - private TerminateToken terminateToken = new TerminateToken(); - private LinkisManageJob manageJob = new LinkisManageJob(); - private Boolean isAsync = false; - - public void setAsync(Boolean async) { - isAsync = async; - } - - public Boolean isAsync() { - return isAsync; - } - - @Override - public LinkisJobOperator getJobOperator() { - if (!(super.getJobOperator() instanceof LinkisJobOperator)) { - throw new 
LinkisClientExecutionException( - "EXE0003", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "JobOperator of LinkisManageJob should be instance of LinkisJobOperator"); - } - return (LinkisJobOperator) super.getJobOperator(); - } - - @Override - public void setOperator(JobOperator operator) { - if (!(operator instanceof LinkisJobOperator)) { - throw new LinkisClientExecutionException( - "EXE0003", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "JobOperator of LinkisManageJob should be instance of LinkisJobOperator"); - } - manageJob.setOperator(operator); - super.setOperator(operator); - } - - @Override - public LinkisSubmitDesc getJobDesc() { - return jobDesc; - } - - public void setJobDesc(LinkisSubmitDesc jobDesc) { - this.jobDesc = jobDesc; - } - - @Override - public LinkisJobData getJobData() { - return data; - } - - public void setJobData(LinkisJobData data) { - manageJob.setJobData(data); - this.data = data; - } - - @Override - public TerminateToken getTerminateToken() { - return terminateToken; - } - - public void setTerminateToken(TerminateToken terminateToken) { - this.terminateToken = terminateToken; - } - - @Override - public void submit() throws LinkisClientRuntimeException { - StringBuilder infoBuilder = new StringBuilder(); - infoBuilder.append("connecting to linkis gateway:").append(getJobOperator().getServerUrl()); - LogUtils.getInformationLogger().info(infoBuilder.toString()); - data.updateByOperResult(getJobOperator().submit(jobDesc)); - CommonUtils.doSleepQuietly(2000l); - LinkisJobManDesc jobManDesc = new LinkisJobManDesc(); - jobManDesc.setJobId(data.getJobID()); - jobManDesc.setUser(data.getUser()); - manageJob.setJobDesc(jobManDesc); - data.updateByOperResult(getJobOperator().queryJobInfo(data.getUser(), data.getJobID())); - infoBuilder.setLength(0); - infoBuilder - .append("JobId:") - .append(data.getJobID()) - .append(System.lineSeparator()) - .append("TaskId:") - .append(data.getJobID()) - .append(System.lineSeparator()) - 
.append("ExecId:") - .append(data.getExecID()); - LogUtils.getPlaintTextLogger().info(infoBuilder.toString()); - if (isAsync) { - data.setSuccess(data.getJobStatus() != null && data.getJobStatus().isJobSubmitted()); - } - } - - @Override - public void updateJobStatus() throws LinkisClientRuntimeException { - data.updateByOperResult(getJobOperator().queryJobInfo(data.getUser(), data.getJobID())); - getJobOperator().queryJobStatus(data.getUser(), data.getJobID(), data.getExecID()); - String log2 = - "\n---------------------------------------------------\n" - + "\ttask " - + data.getJobID() - + " status is " - + data.getJobStatus() - + ", progress : " - + data.getJobProgress() - + "\n---------------------------------------------------"; - logger.info(log2); - } - - @Override - public void waitJobComplete() throws LinkisClientRuntimeException { - int retryCnt = 0; - final int MAX_RETRY = 30; - while (!data.getJobStatus().isJobFinishedState()) { - // query progress - try { - data.updateByOperResult(getJobOperator().queryJobInfo(data.getUser(), data.getJobID())); - getJobOperator().queryJobStatus(data.getUser(), data.getJobID(), data.getExecID()); - } catch (Exception e) { - logger.warn("", e); - retryCnt++; - if (retryCnt >= MAX_RETRY) { - throw new LinkisClientExecutionException( - "EXE0013", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Cannot get jobStatus from server continuously for {0} seconds. Client aborted! Error message: \n", - MAX_RETRY * 5 * CommonConstants.JOB_QUERY_SLEEP_MILLS, - e); - } - CommonUtils.doSleepQuietly( - 5 * CommonConstants.JOB_QUERY_SLEEP_MILLS); // maybe server problem. 
sleep - // longer - continue; - } - retryCnt = 0; // reset counter - checkJobAvailability(data); - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - } - data.setSuccess(data.getJobStatus() != null && data.getJobStatus().isJobSuccess()); - waitIncLogComplete(data); - } - - private void waitIncLogComplete(LinkisJobData data) { - if (!(data instanceof LinkisLogData)) { - return; - } - int retry = 0; - int MAX_RETRY = 300; // wait for 10 minutes after job finish - while (retry++ < MAX_RETRY) { - if (((LinkisLogData) data).logFinReceived()) { - return; - } - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - } - String msg = - "Job is in Finished state(SUCCEED/FAILED/CANCELED) but client keep querying inclog for " - + (MAX_RETRY * CommonConstants.JOB_QUERY_SLEEP_MILLS / 1000) - + "seconds. Execution ends forcefully. Next will try handle execution result."; - logger.warn(msg); - LogUtils.getInformationLogger().warn(msg); - } - - @Override - public void startRetrieveResult() { - try { - manageJob.startRetrieveResult(); - data.setSuccess(true); - } catch (LinkisClientExecutionException e) { - if (e.getCode().equals("EXE0037")) { - data.setSuccess(true); - LogUtils.getInformationLogger().warn(e.getMessage()); - } else { - data.setSuccess(false); - data.setException(e); - } - ((LinkisResultData) data).sendResultFin(); // inform listener to stop - } catch (Exception e) { - data.setSuccess(false); - data.setException(e); - ((LinkisResultData) data).sendResultFin(); // inform listener to stop - } - } - - @Override - public void startRetrieveLog() { - if (!(data instanceof LinkisLogData)) { - throw new LinkisClientExecutionException( - "EXE0034", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "JobData is not LinkisLogData"); - } - LinkisLogData dataCopy; - try { - dataCopy = ((LinkisLogData) data).clone(); // make a copy to avoid race condition - } catch (CloneNotSupportedException e) { - throw new LinkisClientExecutionException( - "EXE0035", 
ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "logData is not Cloneable", e); - } - dataCopy.setIncLogMode(true); - manageJob.startRetrieveLogInternal(dataCopy); - } - - @Override - public void terminate() throws LinkisClientRuntimeException { - terminateToken.setTerminate(); - // kill job if job is submitted - if (StringUtils.isNotBlank(data.getJobID())) { - System.out.println("\nKilling job: " + data.getJobID()); - try { - manageJob.doKill(); - if (data.getJobStatus().isJobCancelled()) { - System.out.println("Successfully killed job: " + data.getJobID() + " on exit"); - } else { - System.out.println( - "Failed to kill job: " - + data.getJobID() - + " on exit. Current job status: " - + data.getJobStatus()); - } - } catch (Exception e) { - System.out.println("Failed to kill job: " + data.getJobID() + " on exit"); - System.out.println(ExceptionUtils.getStackTrace(e)); - } - } - } - - private void checkJobAvailability(LinkisJobData data) throws LinkisClientRuntimeException { - if (data.getJobStatus().isJobAbnormalStatus()) { - throw new LinkisClientExecutionException( - "EXE0006", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Job is in abnormal status: " + CommonUtils.GSON.toJson(data)); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisManageJobBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisManageJobBuilder.java deleted file mode 100644 index ef5648b993d..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisManageJobBuilder.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.builder; - -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisManageJob; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobDataImpl; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisJobManDesc; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.operator.ujes.LinkisJobOperator; -import org.apache.linkis.cli.application.utils.ExecutionUtils; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.JobBuilder; -import org.apache.linkis.cli.core.operator.JobOperatorFactory; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.PresentWayImpl; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashSet; -import java.util.Set; - -import 
org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisManageJobBuilder extends JobBuilder { - private static Logger logger = LoggerFactory.getLogger(LinkisSubmitJobBuilder.class); - - LinkisClientListener logListener; - - public LinkisManageJobBuilder setLogListener(LinkisClientListener observer) { - this.logListener = observer; - return this; - } - - @Override - protected LinkisJobManDesc buildJobDesc() { - LinkisJobManDesc desc = new LinkisJobManDesc(); - String osUser = sysVarAccess.getVar(String.class, AppKeys.LINUX_USER_KEY); - String[] adminUsers = StringUtils.split(AppKeys.ADMIN_USERS, ','); - Set adminSet = new HashSet<>(); - for (String admin : adminUsers) { - adminSet.add(admin); - } - String submitUsr = ExecutionUtils.getSubmitUser(stdVarAccess, osUser, adminSet); - - String jobId = null; - if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_KILL_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_KILL_OPT); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_STATUS_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_STATUS_OPT); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_DESC_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_DESC_OPT); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_LOG_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_LOG_OPT); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_RESULT_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_RESULT_OPT); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_LIST_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_LIST_OPT); - } - - desc.setJobId(jobId); - desc.setUser(submitUsr); - return desc; - } - - @Override - protected LinkisJobData buildJobData() { - LinkisJobDataImpl data = new LinkisJobDataImpl(); - if (logListener == null) { - logger.warn("logListener is not registered, will not be able to display log"); - } 
else { - data.registerincLogListener(logListener); - } - return data; - } - - @Override - protected LinkisJobOperator buildJobOperator() { - LinkisJobOperator oper; - try { - oper = (LinkisJobOperator) JobOperatorFactory.getReusable(AppKeys.REUSABLE_UJES_CLIENT); - } catch (Exception e) { - throw new LinkisClientRuntimeException( - "BLD0012", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Failed to get a valid operator.", - e); - } - return oper; - } - - @Override - protected PresentWay buildPresentWay() { - PresentWayImpl presentWay = new PresentWayImpl(); - String outputPath = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH); - - presentWay.setPath(outputPath); - presentWay.setMode(PresentModeImpl.STDOUT); - if (StringUtils.isNotBlank(outputPath)) { - presentWay.setMode(PresentModeImpl.TEXT_FILE); - } - - return presentWay; - } - - @Override - protected LinkisManageJob getTargetNewInstance() { - return new LinkisManageJob(); - } - - @Override - public LinkisManageJob build() { - ((LinkisManageJob) targetObj).setJobDesc(buildJobDesc()); - ((LinkisManageJob) targetObj).setJobData(buildJobData()); - targetObj.setOperator(buildJobOperator()); - targetObj.setPresentWay(buildPresentWay()); - return (LinkisManageJob) super.build(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisOnceJobBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisOnceJobBuilder.java deleted file mode 100644 index 15a2eb3ac33..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisOnceJobBuilder.java +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - 
* contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.builder; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisOnceJob; -import org.apache.linkis.cli.application.interactor.job.data.LinkisOnceJobData; -import org.apache.linkis.cli.application.interactor.job.data.SimpleOnceJobAdapter; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisOnceDesc; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.utils.ExecutionUtils; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.core.interactor.job.JobBuilder; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.PresentWayImpl; - -import org.apache.commons.lang3.StringUtils; - -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisOnceJobBuilder extends JobBuilder { - private static Logger logger = 
LoggerFactory.getLogger(LinkisSubmitJobBuilder.class); - - private LinkisClientListener logListener; - private Boolean isAsync = false; - private SimpleOnceJobAdapter onceJobAdapter = new SimpleOnceJobAdapter(); - - public LinkisOnceJobBuilder setLogListener(LinkisClientListener observer) { - this.logListener = observer; - return this; - } - - public LinkisOnceJobBuilder setAsync(Boolean async) { - isAsync = async; - return this; - } - - @Override - protected LinkisOnceJob getTargetNewInstance() { - return new LinkisOnceJob(); - } - - @Override - protected LinkisOnceDesc buildJobDesc() { - LinkisOnceDesc desc = new LinkisOnceDesc(); - - desc.setStdVarAccess(stdVarAccess); - desc.setSysVarAccess(sysVarAccess); - - Map confMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_CONF); - Map runtimeMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_RUNTIME); - Map varMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_VAR); - Map labelMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_LABEL); - Map sourceMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_SOURCE); - Map executionMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_EXEC); - Map jobContentMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_CONTENT); - - confMap = confMap == null ? new HashMap<>() : confMap; - runtimeMap = runtimeMap == null ? new HashMap<>() : runtimeMap; - varMap = varMap == null ? new HashMap<>() : varMap; - labelMap = labelMap == null ? new HashMap<>() : labelMap; - sourceMap = sourceMap == null ? new HashMap<>() : sourceMap; - executionMap = executionMap == null ? new HashMap<>() : executionMap; - jobContentMap = jobContentMap == null ? 
new HashMap<>() : jobContentMap; - - confMap = ProcessKeyUtils.removePrefixForKeysInMap(confMap); - runtimeMap = ProcessKeyUtils.removePrefixForKeysInMap(runtimeMap); - labelMap = ProcessKeyUtils.removePrefixForKeysInMap(labelMap); - sourceMap = ProcessKeyUtils.removePrefixForKeysInMap(sourceMap); - executionMap = ProcessKeyUtils.removePrefixForKeysInMap(executionMap); - jobContentMap = ProcessKeyUtils.removePrefixForKeysInMap(jobContentMap); - - for (String key : stdVarAccess.getAllVarKeys()) { - Object val = stdVarAccess.getVar(Object.class, key); - if (!(val instanceof Map) && val != null) { - // note that we allow it to overwrite existing values in map - if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_CONF)) { - ProcessKeyUtils.removePrefixAndPutValToMap(confMap, key, val, AppKeys.JOB_PARAM_CONF); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_VAR)) { - ProcessKeyUtils.removePrefixAndPutValToMap(varMap, key, val, AppKeys.JOB_PARAM_VAR); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_RUNTIME)) { - ProcessKeyUtils.removePrefixAndPutValToMap( - runtimeMap, key, val, AppKeys.JOB_PARAM_RUNTIME); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_EXEC)) { - ProcessKeyUtils.removePrefixAndPutValToMap(executionMap, key, val, AppKeys.JOB_EXEC); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_LABEL)) { - ProcessKeyUtils.removePrefixAndPutValToMap(labelMap, key, val, AppKeys.JOB_LABEL); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_SOURCE)) { - ProcessKeyUtils.removePrefixAndPutValToMap(sourceMap, key, val, AppKeys.JOB_SOURCE); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_CONTENT)) { - ProcessKeyUtils.removePrefixAndPutValToMap(jobContentMap, key, val, AppKeys.JOB_CONTENT); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.LINKIS_CLIENT_COMMON)) { - // do nothing - } else { - // confMap.put(key, stdVarAccess.getVar(Object.class, key)); - } - } - } 
- - String creator; - if (!isAsync) { - creator = - stdVarAccess.getVarOrDefault( - String.class, AppKeys.JOB_COMMON_CREATOR, AppConstants.JOB_CREATOR_DEFAULT); - } else { - creator = - stdVarAccess.getVarOrDefault( - String.class, AppKeys.JOB_COMMON_CREATOR, AppConstants.JOB_CREATOR_ASYNC_DEFAULT); - } - String code = stdVarAccess.getVar(String.class, AppKeys.JOB_EXEC_CODE); - String engineType = stdVarAccess.getVar(String.class, AppKeys.JOB_LABEL_ENGINE_TYPE); - String runType = stdVarAccess.getVar(String.class, AppKeys.JOB_LABEL_CODE_TYPE); - String scriptPath = - stdVarAccess.getVarOrDefault(String.class, AppKeys.JOB_SOURCE_SCRIPT_PATH, "LinkisCli"); - - String osUser = sysVarAccess.getVar(String.class, AppKeys.LINUX_USER_KEY); - String[] adminUsers = StringUtils.split(AppKeys.ADMIN_USERS, ','); - Set adminSet = new HashSet<>(); - for (String admin : adminUsers) { - adminSet.add(admin); - } - String submitUsr = ExecutionUtils.getSubmitUser(stdVarAccess, osUser, adminSet); - String proxyUsr = ExecutionUtils.getProxyUser(stdVarAccess, submitUsr, adminSet); - - String enableExecuteOnce = - stdVarAccess.getVarOrDefault(String.class, AppKeys.JOB_LABEL_EXECUTEONCE, "true"); - // default executeOnce-mode - if (Boolean.parseBoolean(enableExecuteOnce)) { - labelMap.put(LinkisKeys.KEY_EXECUTEONCE, ""); - } else { - labelMap.remove(LinkisKeys.KEY_EXECUTEONCE); - } - String codePath = stdVarAccess.getVar(String.class, AppKeys.JOB_COMMON_CODE_PATH); - Object extraArgsObj = stdVarAccess.getVar(Object.class, AppKeys.JOB_EXTRA_ARGUMENTS); - if (extraArgsObj != null - && extraArgsObj instanceof String[] - && StringUtils.isBlank(code) - && StringUtils.isBlank(codePath)) { - String[] extraArgs = (String[]) extraArgsObj; - codePath = extraArgs[0]; - if (extraArgs.length > 1) { - runtimeMap.put( - LinkisKeys.EXTRA_ARGUMENTS, Arrays.copyOfRange(extraArgs, 1, extraArgs.length)); - } - } - - if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) { - code = 
ExecutionUtils.readFile(codePath); - } - - executionMap.put(LinkisKeys.KEY_CODE, code); - labelMap.put(LinkisKeys.KEY_ENGINETYPE, engineType); - labelMap.put(LinkisKeys.KEY_CODETYPE, runType); - labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator); - sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath); - runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true); - - desc.setCreator(creator); - desc.setParamConfMap(confMap); - desc.setParamRunTimeMap(runtimeMap); - desc.setParamVarsMap(varMap); - desc.setLabelMap(labelMap); - desc.setSourceMap(sourceMap); - desc.setExecutionMap(executionMap); - desc.setSubmitUser(submitUsr); - desc.setProxyUser(proxyUsr); - desc.setJobContentMap(jobContentMap); - - return desc; - } - - @Override - protected LinkisOnceJobData buildJobData() { - LinkisOnceJobData data = new LinkisOnceJobData(); - data.setOnceJobAdapter(this.onceJobAdapter); - if (logListener == null) { - logger.warn("logListener is not registered, will not be able to display log"); - } else { - data.registerincLogListener(logListener); - } - return data; - } - - @Override - protected JobOperator buildJobOperator() { - // OnceJob is Stateful, should not have an operator - return null; - } - - @Override - protected PresentWay buildPresentWay() { - PresentWayImpl presentWay = new PresentWayImpl(); - String outputPath = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH); - - presentWay.setPath(outputPath); - presentWay.setMode(PresentModeImpl.STDOUT); - presentWay.setDisplayMetaAndLogo( - stdVarAccess.getVarOrDefault(Boolean.class, AppKeys.LINKIS_COMMON_DIAPLAY_META_LOGO, true)); - if (StringUtils.isNotBlank(outputPath)) { - presentWay.setMode(PresentModeImpl.TEXT_FILE); - } - - return presentWay; - } - - @Override - public LinkisOnceJob build() { - LinkisOnceDesc desc = buildJobDesc(); - ((LinkisOnceJob) targetObj).setJobDesc(desc); - LinkisOnceJobData data = buildJobData(); - ((LinkisOnceJob) targetObj).setJobData(data); - 
data.getOnceJobAdapter().init(desc); - ((LinkisOnceJob) targetObj).setAsync(isAsync); - targetObj.setOperator(buildJobOperator()); - targetObj.setPresentWay(buildPresentWay()); - return (LinkisOnceJob) super.build(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisSubmitJobBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisSubmitJobBuilder.java deleted file mode 100644 index 6804f090562..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisSubmitJobBuilder.java +++ /dev/null @@ -1,241 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.builder; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisSubmitJob; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobDataImpl; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisSubmitDesc; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.operator.ujes.LinkisJobOperator; -import org.apache.linkis.cli.application.utils.ExecutionUtils; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.JobBuilder; -import org.apache.linkis.cli.core.operator.JobOperatorFactory; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.PresentWayImpl; - -import org.apache.commons.lang3.StringUtils; - -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisSubmitJobBuilder extends JobBuilder { - private static Logger logger = LoggerFactory.getLogger(LinkisSubmitJobBuilder.class); - - private LinkisClientListener logListener; - private Boolean isAsync = false; - - public LinkisSubmitJobBuilder setLogListener(LinkisClientListener observer) { - this.logListener = observer; - return this; - } - - public LinkisSubmitJobBuilder setAsync(Boolean async) { - isAsync = async; - return this; - } - - @Override - protected LinkisSubmitJob getTargetNewInstance() { - return new 
LinkisSubmitJob(); - } - - @Override - protected LinkisSubmitDesc buildJobDesc() { - LinkisSubmitDesc desc = new LinkisSubmitDesc(); - - Map confMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_CONF); - Map runtimeMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_RUNTIME); - Map varMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_VAR); - Map labelMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_LABEL); - Map sourceMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_SOURCE); - Map executionMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_EXEC); - - confMap = confMap == null ? new HashMap<>() : confMap; - runtimeMap = runtimeMap == null ? new HashMap<>() : runtimeMap; - varMap = varMap == null ? new HashMap<>() : varMap; - labelMap = labelMap == null ? new HashMap<>() : labelMap; - sourceMap = sourceMap == null ? new HashMap<>() : sourceMap; - executionMap = executionMap == null ? new HashMap<>() : executionMap; - - /** remove key prefix of all keys in map type params. e.g. kv in confMap, labelMap etc. 
*/ - confMap = ProcessKeyUtils.removePrefixForKeysInMap(confMap); - runtimeMap = ProcessKeyUtils.removePrefixForKeysInMap(runtimeMap); - labelMap = ProcessKeyUtils.removePrefixForKeysInMap(labelMap); - sourceMap = ProcessKeyUtils.removePrefixForKeysInMap(sourceMap); - executionMap = ProcessKeyUtils.removePrefixForKeysInMap(executionMap); - - /** remove key prefix of non-map type params */ - for (String key : stdVarAccess.getAllVarKeys()) { - Object val = stdVarAccess.getVar(Object.class, key); - if (!(val instanceof Map) && val != null) { - // note that we allow it to overwrite existing values in map - if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_CONF)) { - ProcessKeyUtils.removePrefixAndPutValToMap(confMap, key, val, AppKeys.JOB_PARAM_CONF); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_VAR)) { - ProcessKeyUtils.removePrefixAndPutValToMap(varMap, key, val, AppKeys.JOB_PARAM_VAR); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_RUNTIME)) { - ProcessKeyUtils.removePrefixAndPutValToMap( - runtimeMap, key, val, AppKeys.JOB_PARAM_RUNTIME); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_EXEC)) { - ProcessKeyUtils.removePrefixAndPutValToMap(executionMap, key, val, AppKeys.JOB_EXEC); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_LABEL)) { - ProcessKeyUtils.removePrefixAndPutValToMap(labelMap, key, val, AppKeys.JOB_LABEL); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_SOURCE)) { - ProcessKeyUtils.removePrefixAndPutValToMap(sourceMap, key, val, AppKeys.JOB_SOURCE); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.LINKIS_CLIENT_COMMON)) { - // do nothing - } else { - // confMap.put(key, stdVarAccess.getVar(Object.class, key)); - } - } - } - - String creator; - if (!isAsync) { - creator = - stdVarAccess.getVarOrDefault( - String.class, AppKeys.JOB_COMMON_CREATOR, AppConstants.JOB_CREATOR_DEFAULT); - } else { - creator = - stdVarAccess.getVarOrDefault( - 
String.class, AppKeys.JOB_COMMON_CREATOR, AppConstants.JOB_CREATOR_ASYNC_DEFAULT); - } - String code = stdVarAccess.getVar(String.class, AppKeys.JOB_EXEC_CODE); - String engineType = stdVarAccess.getVar(String.class, AppKeys.JOB_LABEL_ENGINE_TYPE); - String runType = stdVarAccess.getVar(String.class, AppKeys.JOB_LABEL_CODE_TYPE); - String scriptPath = - stdVarAccess.getVarOrDefault(String.class, AppKeys.JOB_SOURCE_SCRIPT_PATH, "LinkisCli"); - - String osUser = sysVarAccess.getVar(String.class, AppKeys.LINUX_USER_KEY); - String[] adminUsers = StringUtils.split(AppKeys.ADMIN_USERS, ','); - Set adminSet = new HashSet<>(); - for (String admin : adminUsers) { - adminSet.add(admin); - } - String submitUsr = ExecutionUtils.getSubmitUser(stdVarAccess, osUser, adminSet); - String proxyUsr = ExecutionUtils.getProxyUser(stdVarAccess, submitUsr, adminSet); - - String enableExecuteOnce = - stdVarAccess.getVarOrDefault(String.class, AppKeys.JOB_LABEL_EXECUTEONCE, "true"); - // default executeOnce-mode - if (Boolean.parseBoolean(enableExecuteOnce)) { - labelMap.put(LinkisKeys.KEY_EXECUTEONCE, ""); - } else { - labelMap.remove(LinkisKeys.KEY_EXECUTEONCE); - } - String codePath = stdVarAccess.getVar(String.class, AppKeys.JOB_COMMON_CODE_PATH); - Object extraArgsObj = stdVarAccess.getVar(Object.class, AppKeys.JOB_EXTRA_ARGUMENTS); - if (extraArgsObj != null - && extraArgsObj instanceof String[] - && StringUtils.isBlank(code) - && StringUtils.isBlank(codePath)) { - String[] extraArgs = (String[]) extraArgsObj; - codePath = extraArgs[0]; - if (extraArgs.length > 1) { - runtimeMap.put( - LinkisKeys.EXTRA_ARGUMENTS, Arrays.copyOfRange(extraArgs, 1, extraArgs.length)); - } - } - - if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) { - code = ExecutionUtils.readFile(codePath); - } - - executionMap.put(LinkisKeys.KEY_CODE, code); - labelMap.put(LinkisKeys.KEY_ENGINETYPE, engineType); - labelMap.put(LinkisKeys.KEY_CODETYPE, runType); - 
labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator); - sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath); - runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true); - - desc.setCreator(creator); - desc.setParamConfMap(confMap); - desc.setParamRunTimeMap(runtimeMap); - desc.setParamVarsMap(varMap); - desc.setLabelMap(labelMap); - desc.setSourceMap(sourceMap); - desc.setExecutionMap(executionMap); - desc.setSubmitUser(submitUsr); - desc.setProxyUser(proxyUsr); - - return desc; - } - - @Override - protected LinkisJobData buildJobData() { - LinkisJobDataImpl data = new LinkisJobDataImpl(); - if (logListener == null) { - logger.warn("logListener is not registered, will not be able to display log"); - } else { - data.registerincLogListener(logListener); - } - return data; - } - - @Override - protected LinkisJobOperator buildJobOperator() { - LinkisJobOperator oper; - try { - oper = (LinkisJobOperator) JobOperatorFactory.getReusable(AppKeys.REUSABLE_UJES_CLIENT); - } catch (Exception e) { - throw new LinkisClientRuntimeException( - "BLD0012", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Failed to get a valid operator.", - e); - } - return oper; - } - - @Override - protected PresentWay buildPresentWay() { - PresentWayImpl presentWay = new PresentWayImpl(); - String outputPath = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH); - - presentWay.setPath(outputPath); - presentWay.setMode(PresentModeImpl.STDOUT); - presentWay.setDisplayMetaAndLogo( - stdVarAccess.getVarOrDefault(Boolean.class, AppKeys.LINKIS_COMMON_DIAPLAY_META_LOGO, true)); - if (StringUtils.isNotBlank(outputPath)) { - presentWay.setMode(PresentModeImpl.TEXT_FILE); - } - - return presentWay; - } - - @Override - public LinkisSubmitJob build() { - ((LinkisSubmitJob) targetObj).setJobDesc(buildJobDesc()); - ((LinkisSubmitJob) targetObj).setJobData(buildJobData()); - ((LinkisSubmitJob) targetObj).setAsync(isAsync); - 
targetObj.setOperator(buildJobOperator()); - targetObj.setPresentWay(buildPresentWay()); - return (LinkisSubmitJob) super.build(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtils.java deleted file mode 100644 index 9527a8f513a..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtils.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.builder; - -import org.apache.linkis.cli.application.constants.AppKeys; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.Map; - -public class ProcessKeyUtils { - - public static Map removePrefixForKeysInMap(Map map) { - final String[] PREFIX = - new String[] { - AppKeys.JOB_PARAM_CONF, - AppKeys.JOB_PARAM_RUNTIME, - AppKeys.JOB_PARAM_VAR, - AppKeys.JOB_EXEC, - AppKeys.JOB_SOURCE, - AppKeys.JOB_LABEL, - AppKeys.JOB_CONTENT - }; - for (String prefix : PREFIX) { - map = removePrefixForKeysInMap(map, prefix); - } - return map; - } - - public static void removePrefixAndPutValToMap( - Map map, String key, Object value, String prefix) { - String realKey = getRealKey(key, prefix); - if (StringUtils.isNotBlank(realKey) && !(value instanceof Map)) { - map.put(realKey, value); - } - } - - private static Map removePrefixForKeysInMap( - Map map, String prefix) { - if (map == null) { - return null; - } - Map newMap = new HashMap<>(); - for (String key : map.keySet()) { - String realKey = getRealKey(key, prefix); - if (StringUtils.isNotBlank(realKey)) { - if (StringUtils.startsWith(key, prefix)) { - newMap.put(realKey, map.get(key)); - } else { - newMap.put(key, map.get(key)); - } - } - } - return newMap; - } - - private static String getRealKey(String key, String prefix) { - String realKey = key; - if (StringUtils.startsWith(key, prefix)) { - realKey = StringUtils.substring(key, prefix.length() + 1); - } - return realKey; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobData.java deleted file mode 100644 index 3d24570bf2a..00000000000 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobData.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; -import org.apache.linkis.cli.common.entity.job.JobData; - -public interface LinkisJobData extends JobData, Cloneable { - - String getExecID(); - - float getJobProgress(); - - Integer getErrCode(); - - String getErrDesc(); - - boolean isSuccess(); - - void setSuccess(boolean success); - - void updateByOperResult(LinkisOperResultAdapter adapter); - - LinkisJobData clone() throws CloneNotSupportedException; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobDataImpl.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobDataImpl.java deleted file mode 100644 index 227e8c2170e..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobDataImpl.java +++ /dev/null @@ -1,514 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; -import org.apache.linkis.cli.application.observer.event.LogStartEvent; -import org.apache.linkis.cli.application.observer.event.TriggerEvent; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.observer.listener.TriggerEventListener; -import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; -import org.apache.linkis.cli.common.entity.job.JobStatus; - -import java.util.Arrays; -import java.util.Date; -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.LinkedBlockingDeque; - -public class LinkisJobDataImpl - implements LinkisJobData, LinkisLogData, LinkisResultData, Cloneable { - - private String jobID; - private String user; - private JobStatus jobStatus = null; - private String message; - private Exception exception; - private String execID; - private float progress = 0.0f; - private Boolean incLogMode; - private LinkedBlockingDeque logBuffer = new LinkedBlockingDeque(); - private String logPath; // remote path for job log - private Integer nextLogLineIdx; - private Boolean hasNextLogLine; - private String resultLocation; - private String[] resultSetPaths = null; // remote paths for job result set - private LinkedBlockingDeque resultContent = new LinkedBlockingDeque<>(); - private Boolean hasNextResultPage; - private Integer errCode = null; - private String errDesc = null; - private boolean success = false; - private String instance; - private String umUser; - private String simpleExecId; - private String executionCode; - private String engineType; - private String runType; - private Long costTime; - private Date createdTime; - private Date updatedTime; - private Date engineStartTime; - private String executeApplicationName; - private String requestApplicationName; - - private LinkisClientEvent 
logstartEvent = new LogStartEvent(); - private TriggerEvent logFinevent = new TriggerEvent(); - private TriggerEventListener logFinListener = new TriggerEventListener(); - private TriggerEvent resultFinEvent = new TriggerEvent(); - private TriggerEventListener resultFinListener = new TriggerEventListener(); - - private boolean hasResult = true; - - { - logFinevent.register(logFinListener); - resultFinEvent.register(resultFinListener); - } - - @Override - public String getJobID() { - return jobID; - } - - public void setJobId(String jobId) { - this.jobID = jobId; - } - - @Override - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - @Override - public JobStatus getJobStatus() { - return jobStatus; - } - - public void setJobStatus(JobStatus jobStatus) { - this.jobStatus = jobStatus; - } - - @Override - public String getMessage() { - return message; - } - - @Override - public void setMessage(String message) { - this.message = message; - } - - @Override - public Exception getException() { - return exception; - } - - @Override - public void setException(Exception exception) { - this.exception = exception; - } - - @Override - public final String getExecID() { - return execID; - } - - public final void setExecID(String execID) { - this.execID = execID; - } - - @Override - public final float getJobProgress() { - return progress; - } - - public final void setJobProgress(float progress) { - this.progress = progress; - } - - @Override - public final String getLogPath() { - return logPath; - } - - public final void setLogPath(String logPath) { - this.logPath = logPath; - } - - @Override - public final String getResultLocation() { - return resultLocation; - } - - public final void setResultLocation(String resultLocation) { - this.resultLocation = resultLocation; - } - - @Override - public String[] getResultSetPaths() { - return resultSetPaths; - } - - public final void setResultSetPaths(String[] resultSetPaths) { - 
this.resultSetPaths = resultSetPaths; - } - - @Override - public Integer getErrCode() { - return errCode; - } - - public void setErrCode(Integer errCode) { - this.errCode = errCode; - } - - @Override - public String getErrDesc() { - return errDesc; - } - - public void setErrDesc(String errDesc) { - this.errDesc = errDesc; - } - - @Override - public void registerincLogListener(LinkisClientListener observer) { - this.logstartEvent.register(observer); - } - - @Override - public void notifyLogListener() { - if (this.logstartEvent.isRegistered()) { - logstartEvent.notifyObserver(logstartEvent, this); - } - } - - @Override - public boolean isIncLogMode() { - return this.incLogMode; - } - - @Override - public void setIncLogMode(boolean incLogMode) { - this.incLogMode = incLogMode; - } - - @Override - public String consumeLog() { - List logs = new LinkedList<>(); - this.logBuffer.drainTo(logs, this.logBuffer.size()); - StringBuilder tmp = new StringBuilder(); - for (String str : logs) { - tmp.append(str); - } - return tmp.toString(); - } - - public void appendLog(String log) { - this.logBuffer.add(log); - } - - @Override - public Integer getNextLogLineIdx() { - return nextLogLineIdx; - } - - public void setNextLogLineIdx(Integer nextLogLineIdx) { - this.nextLogLineIdx = nextLogLineIdx; - } - - @Override - public Boolean hasNextLogLine() { - return hasNextLogLine; - } - - @Override - public void setHasNextLogLine(Boolean hasNextLogLine) { - this.hasNextLogLine = hasNextLogLine; - } - - @Override - public List consumeResultContent() { - List ret = new LinkedList<>(); - resultContent.drainTo(ret, resultContent.size()); - return ret; - } - - public void appendResultContent(LinkisResultSet resultContent) { - this.resultContent.add(resultContent); - } - - @Override - public Boolean hasNextResultPage() { - return hasNextResultPage; - } - - public void setHasNextResultPage(Boolean hasNextResultPage) { - this.hasNextResultPage = hasNextResultPage; - } - - @Override - public void 
sendLogFin() { - if (this.logFinevent != null && this.logFinevent.isRegistered()) { - this.logFinevent.notifyObserver(resultFinEvent, null); - } - } - - @Override - public boolean logFinReceived() { - return this.logFinListener.isTriggered(); - } - - @Override - public void sendResultFin() { - if (this.resultFinEvent != null && this.resultFinEvent.isRegistered()) { - this.resultFinEvent.notifyObserver(resultFinEvent, null); - } - } - - @Override - public boolean resultFinReceived() { - return this.resultFinListener.isTriggered(); - } - - @Override - public boolean hasResult() { - return hasResult; - } - - @Override - public void setHasResult(boolean hasResult) { - this.hasResult = hasResult; - } - - @Override - public boolean isSuccess() { - return success; - } - - @Override - public void setSuccess(boolean success) { - this.success = success; - } - - public String getInstance() { - return instance; - } - - public void setInstance(String instance) { - this.instance = instance; - } - - public String getUmUser() { - return umUser; - } - - public void setUmUser(String umUser) { - this.umUser = umUser; - } - - public String getSimpleExecId() { - return simpleExecId; - } - - public void setSimpleExecId(String simpleExecId) { - this.simpleExecId = simpleExecId; - } - - public String getExecutionCode() { - return executionCode; - } - - public void setExecutionCode(String executionCode) { - this.executionCode = executionCode; - } - - public String getEngineType() { - return engineType; - } - - public void setEngineType(String engineType) { - this.engineType = engineType; - } - - public String getRunType() { - return runType; - } - - public void setRunType(String runType) { - this.runType = runType; - } - - public Long getCostTime() { - return costTime; - } - - public void setCostTime(Long costTime) { - this.costTime = costTime; - } - - public Date getCreatedTime() { - return createdTime; - } - - public void setCreatedTime(Date createdTime) { - this.createdTime = 
createdTime; - } - - public Date getUpdatedTime() { - return updatedTime; - } - - public void setUpdatedTime(Date updatedTime) { - this.updatedTime = updatedTime; - } - - public Date getEngineStartTime() { - return engineStartTime; - } - - public void setEngineStartTime(Date engineStartTime) { - this.engineStartTime = engineStartTime; - } - - public String getExecuteApplicationName() { - return executeApplicationName; - } - - public void setExecuteApplicationName(String executeApplicationName) { - this.executeApplicationName = executeApplicationName; - } - - public String getRequestApplicationName() { - return requestApplicationName; - } - - public void setRequestApplicationName(String requestApplicationName) { - this.requestApplicationName = requestApplicationName; - } - - @Override - public void updateByOperResult(LinkisOperResultAdapter adapter) { - if (adapter.getJobID() != null) { - setJobId(adapter.getJobID()); - } - if (adapter.getUser() != null) { - setUser(adapter.getUser()); - } - if (adapter.getJobStatus() != null) { - setJobStatus(adapter.getJobStatus()); - } - if (adapter.getStrongerExecId() != null) { - setExecID(adapter.getStrongerExecId()); - } - if (adapter.getJobProgress() != null) { - setJobProgress(adapter.getJobProgress()); - } - if (adapter.getLogPath() != null) { - setLogPath(adapter.getLogPath()); - } - if (adapter.getResultLocation() != null) { - setResultLocation(adapter.getResultLocation()); - } - if (adapter.getResultSetPaths() != null) { - setResultSetPaths(adapter.getResultSetPaths()); - } - if (adapter.getErrCode() != null) { - setErrCode(adapter.getErrCode()); - } - if (adapter.getErrDesc() != null) { - setErrDesc(adapter.getErrDesc()); - } - if (adapter.getLog() != null - && adapter.getNextLogLine() != null - && adapter.hasNextLogLine() != null) { - setNextLogLineIdx(adapter.getNextLogLine()); - setHasNextLogLine(adapter.hasNextLogLine()); - appendLog(adapter.getLog()); - } - if (adapter.getResultContent() != null && 
adapter.resultHasNextPage() != null) { - setHasNextResultPage(adapter.resultHasNextPage()); - appendResultContent(adapter.getResultContent()); - } - if (adapter.getInstance() != null) { - setInstance(adapter.getInstance()); - } - if (adapter.getUmUser() != null) { - setUmUser(adapter.getUmUser()); - } - if (adapter.getSimpleExecId() != null) { - setSimpleExecId(adapter.getSimpleExecId()); - } - if (adapter.getExecutionCode() != null) { - setExecutionCode(adapter.getExecutionCode()); - } - if (adapter.getEngineType() != null) { - setEngineType(adapter.getEngineType()); - } - if (adapter.getRunType() != null) { - setRunType(adapter.getRunType()); - } - if (adapter.getCostTime() != null) { - setCostTime(adapter.getCostTime()); - } - if (adapter.getCreatedTime() != null) { - setCreatedTime(adapter.getCreatedTime()); - } - if (adapter.getUpdatedTime() != null) { - setUpdatedTime(adapter.getUpdatedTime()); - } - if (adapter.getEngineStartTime() != null) { - setEngineStartTime(adapter.getEngineStartTime()); - } - if (adapter.getExecuteApplicationName() != null) { - setExecuteApplicationName(adapter.getExecuteApplicationName()); - } - if (adapter.getRequestApplicationName() != null) { - setRequestApplicationName(adapter.getRequestApplicationName()); - } - } - - @Override - public LinkisJobDataImpl clone() throws CloneNotSupportedException { - LinkisJobDataImpl ret = (LinkisJobDataImpl) super.clone(); - if (logBuffer != null) { - ret.logBuffer = new LinkedBlockingDeque(this.logBuffer); - } - if (this.resultContent != null) { - ret.resultContent = new LinkedBlockingDeque<>(); - for (LinkisResultSet r1 : resultContent) { - ret.resultContent.add(r1.clone()); - } - } - if (this.resultSetPaths != null) { - ret.setResultSetPaths(Arrays.copyOf(this.resultSetPaths, this.resultSetPaths.length)); - } - /* - These be shared and hence should not be deep copied. 
- */ - ret.logFinevent = this.logFinevent; - ret.logFinListener = this.logFinListener; - ret.resultFinEvent = this.resultFinEvent; - ret.resultFinListener = this.resultFinListener; - - return ret; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisLogData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisLogData.java deleted file mode 100644 index 3df7cc5a479..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisLogData.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; - -public interface LinkisLogData extends LinkisJobData, Cloneable { - - void notifyLogListener(); - - void registerincLogListener(LinkisClientListener observer); - - String getLogPath(); - - Integer getNextLogLineIdx(); - - void setHasNextLogLine(Boolean hasNextLog); - - Boolean hasNextLogLine(); - - String consumeLog(); - - boolean isIncLogMode(); - - /* - incLogMode = true: for sync-submission, wait for job complete while get incremental log - incLogMode = false: for async-submission, output all log we have currently - */ - void setIncLogMode(boolean incLogMode); - - void sendLogFin(); - - boolean logFinReceived(); - - LinkisLogData clone() throws CloneNotSupportedException; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisOnceJobData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisOnceJobData.java deleted file mode 100644 index 89130b62afd..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisOnceJobData.java +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; -import org.apache.linkis.cli.common.entity.job.JobStatus; - -import java.util.List; - -public class LinkisOnceJobData implements LinkisJobData, LinkisLogData, LinkisResultData { - - private SimpleOnceJobAdapter onceJobAdapter; - - private boolean hasResult = true; - - public SimpleOnceJobAdapter getOnceJobAdapter() { - return onceJobAdapter; - } - - public void setOnceJobAdapter(SimpleOnceJobAdapter onceJobAdapter) { - this.onceJobAdapter = onceJobAdapter; - } - - @Override - public void registerincLogListener(LinkisClientListener observer) { - onceJobAdapter.registerincLogListener(observer); - } - - @Override - public void notifyLogListener() { - onceJobAdapter.notifyLogListener(); - } - - @Override - public boolean isIncLogMode() { - return onceJobAdapter.isIncLogMode(); - } - - @Override - public void setIncLogMode(boolean incLogMode) { - onceJobAdapter.setIncLogMode(incLogMode); - } - - @Override - public String consumeLog() { - return onceJobAdapter.consumeLog(); - } - - public void appendLog(String log) { - onceJobAdapter.appendLog(log); - } - - @Override - public final String getLogPath() { - return onceJobAdapter.getLogPath(); - } - - public final void setLogPath(String logPath) { - onceJobAdapter.setLogPath(logPath); - } - - @Override - public Integer getNextLogLineIdx() { - return 
onceJobAdapter.getNextLogLineIdx(); - } - - public void setNextLogLineIdx(Integer nextLogLineIdx) { - onceJobAdapter.setNextLogLineIdx(nextLogLineIdx); - } - - @Override - public Boolean hasNextLogLine() { - return onceJobAdapter.hasNextLogLine(); - } - - @Override - public void setHasNextLogLine(Boolean hasNextLogLine) { - onceJobAdapter.setHasNextLogLine(hasNextLogLine); - } - - @Override - public List consumeResultContent() { - return onceJobAdapter.consumeResultContent(); - } - - public void appendResultContent(LinkisResultSet resultContent) { - onceJobAdapter.appendResultContent(resultContent); - } - - @Override - public Boolean hasNextResultPage() { - return onceJobAdapter.hasNextResultPage(); - } - - public void setHasNextResultPage(Boolean hasNextResultPage) { - onceJobAdapter.setHasNextResultPage(hasNextResultPage); - } - - @Override - public final String getResultLocation() { - return onceJobAdapter.getResultLocation(); - } - - public final void setResultLocation(String resultLocation) { - onceJobAdapter.setResultLocation(resultLocation); - } - - @Override - public String[] getResultSetPaths() { - return onceJobAdapter.getResultSetPaths(); - } - - public final void setResultSetPaths(String[] resultSetPaths) { - onceJobAdapter.setResultSetPaths(resultSetPaths); - } - - @Override - public void sendLogFin() { - onceJobAdapter.sendLogFin(); - } - - @Override - public boolean logFinReceived() { - return onceJobAdapter.logFinReceived(); - } - - @Override - public void sendResultFin() { - onceJobAdapter.sendResultFin(); - } - - @Override - public boolean resultFinReceived() { - return onceJobAdapter.resultFinReceived(); - } - - @Override - public boolean hasResult() { - return hasResult; - } - - @Override - public void setHasResult(boolean hasResult) { - this.hasResult = hasResult; - } - - @Override - public JobStatus getJobStatus() { - return onceJobAdapter.getJobStatus(); - } - - public void setJobStatus(JobStatus jobStatus) { - 
onceJobAdapter.setJobStatus(jobStatus); - } - - @Override - public String getJobID() { - return onceJobAdapter.getJobID(); - } - - @Override - public String getUser() { - return onceJobAdapter.getUser(); - } - - @Override - public String getMessage() { - return onceJobAdapter.getMessage(); - } - - @Override - public void setMessage(String message) { - onceJobAdapter.setMessage(message); - } - - @Override - public Exception getException() { - return onceJobAdapter.getException(); - } - - @Override - public void setException(Exception e) { - onceJobAdapter.setException(e); - } - - @Override - public String getExecID() { - return onceJobAdapter.getJobID(); - } // No Need - - @Override - public float getJobProgress() { - return 0; - } - - @Override - public Integer getErrCode() { - return onceJobAdapter.getErrCode(); - } - - @Override - public String getErrDesc() { - return onceJobAdapter.getErrDesc(); - } - - @Override - public boolean isSuccess() { - return onceJobAdapter.isSuccess(); - } - - @Override - public void setSuccess(boolean success) { - onceJobAdapter.setSuccess(success); - } - - @Override - public void updateByOperResult(LinkisOperResultAdapter adapter) { - // No need - } - - @Override - public LinkisOnceJobData clone() throws CloneNotSupportedException { - throw new CloneNotSupportedException(); - // return null; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultData.java deleted file mode 100644 index f8e61a790df..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultData.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import java.util.List; - -public interface LinkisResultData extends LinkisJobData, Cloneable { - - String getResultLocation(); - - String[] getResultSetPaths(); - - Boolean hasNextResultPage(); - - List consumeResultContent(); - - void sendResultFin(); - - boolean resultFinReceived(); - - boolean hasResult(); - - void setHasResult(boolean hasResult); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultSet.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultSet.java deleted file mode 100644 index 4fa1dcfe026..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultSet.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; - -public class LinkisResultSet implements Cloneable { - private int resultsetIdx; - private List> resultMeta; - private List> content; - - public LinkisResultSet() {} - - public int getResultsetIdx() { - return resultsetIdx; - } - - public void setResultsetIdx(int resultsetIdx) { - this.resultsetIdx = resultsetIdx; - } - - public List> getResultMeta() { - return resultMeta; - } - - public void setResultMeta(List> resultMeta) { - this.resultMeta = resultMeta; - } - - public List> getContent() { - return content; - } - - public void setContent(List> content) { - this.content = content; - } - - @Override - protected LinkisResultSet clone() throws CloneNotSupportedException { - LinkisResultSet ret = new LinkisResultSet(); - if (this.resultMeta != null) { - List> resultMeta = null; - ret.resultMeta = new LinkedList<>(); - for (LinkedHashMap r1 : resultMeta) { - ret.resultMeta.add((LinkedHashMap) r1.clone()); - } - } - if (this.content.size() != 0) { - ret.content = new LinkedList<>(); - for (List r1 : content) { - ret.content.add(new LinkedList<>(r1)); - } - } - return ret; - } -} diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/SimpleOnceJobAdapter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/SimpleOnceJobAdapter.java deleted file mode 100644 index 69a76d62409..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/SimpleOnceJobAdapter.java +++ /dev/null @@ -1,377 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisJobStatus; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisOnceDesc; -import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; -import org.apache.linkis.cli.application.observer.event.LogStartEvent; -import org.apache.linkis.cli.application.observer.event.TriggerEvent; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.observer.listener.TriggerEventListener; -import org.apache.linkis.cli.application.operator.once.OnceJobConstants; -import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; -import org.apache.linkis.cli.application.operator.ujes.UJESClientFactory; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.computation.client.LinkisJobBuilder$; -import org.apache.linkis.computation.client.once.simple.SimpleOnceJob; -import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder; -import org.apache.linkis.computation.client.once.simple.SubmittableSimpleOnceJob; -import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator; -import org.apache.linkis.computation.client.operator.impl.EngineConnLogs; - -import org.apache.commons.lang3.StringUtils; - -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.LinkedBlockingDeque; - -public class SimpleOnceJobAdapter 
implements LinkisLogData { - LinkisJobStatus jobStatus = LinkisJobStatus.UNSUBMITTED; - EngineConnLogOperator logOperator = null; - private String serverUrl; - private SimpleOnceJob onceJob; - private String engineTypeForECM; - private String message; - private Exception exception; - private boolean success; - private Boolean incLogMode; - private LinkedBlockingDeque logBuffer = new LinkedBlockingDeque(); - // private String logPath; // remote path for job log - // private Integer nextLogLineIdx = 0; - private Boolean hasNextLogLine = true; - private String resultLocation; - private String[] resultSetPaths = null; // remote paths for job result set - private LinkedBlockingDeque resultContent = new LinkedBlockingDeque<>(); - private Boolean hasNextResultPage; - private LinkisClientEvent logstartEvent = new LogStartEvent(); - private TriggerEvent logFinEvent = new TriggerEvent(); - private TriggerEventListener logFinListener = new TriggerEventListener(); - private TriggerEvent resultFinEvent = new TriggerEvent(); - private TriggerEventListener resultFinListener = new TriggerEventListener(); - - { - logFinEvent.register(logFinListener); - resultFinEvent.register(resultFinListener); - } - - public void init(LinkisOnceDesc desc) { - VarAccess stdVarAccess = desc.getStdVarAccess(); - VarAccess sysVarAccess = desc.getSysVarAccess(); - - serverUrl = stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_GATEWAY_URL); - - LinkisJobBuilder$.MODULE$.setDefaultClientConfig( - UJESClientFactory.generateDWSClientConfig(stdVarAccess, sysVarAccess)); - LinkisJobBuilder$.MODULE$.setDefaultUJESClient( - UJESClientFactory.getReusable(stdVarAccess, sysVarAccess)); - - String engineTypeRaw = (String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE); - engineTypeForECM = engineTypeRaw; - - if (StringUtils.isNotBlank(engineTypeRaw)) { - engineTypeForECM = StringUtils.split(engineTypeRaw, "-")[0]; - } else { - engineTypeForECM = ""; - } // TODO: remove parsing and let server side parse 
engineType - - onceJob = - new SimpleOnceJobBuilder() - .setCreateService(AppConstants.LINKIS_CLI) - .addExecuteUser(desc.getProxyUser()) - .setStartupParams(desc.getParamConfMap()) - .setLabels(desc.getLabelMap()) - .setRuntimeParams(desc.getParamRunTimeMap()) - .setSource(desc.getSourceMap()) - .setVariableMap(desc.getParamVarsMap()) - .setJobContent(desc.getJobContentMap()) - .build(); - } - - public String getServerUrl() { - return serverUrl; - } - - public SimpleOnceJob getOnceJob() { - return onceJob; - } - - public void setOnceJob(SimpleOnceJob onceJob) { - this.onceJob = onceJob; - } - - private void panicIfNull(Object obj) { - if (obj == null) { - throw new LinkisClientExecutionException( - "EXE0040", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Instance of " + obj.getClass().getCanonicalName() + " is null"); - } - } - - public void submit() { - panicIfNull(onceJob); - if (!(onceJob instanceof SubmittableSimpleOnceJob)) { - throw new LinkisClientExecutionException( - "EXE0041", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "onceJob is not properly initiated"); - } - ((SubmittableSimpleOnceJob) onceJob).submit(); - } - - public void kill() { - panicIfNull(onceJob); - onceJob.kill(); - } - - public String getJobID() { - return onceJob.getId(); - } - - @Override - public String getUser() { - return "TODO"; - } - - public void updateStatus() { - panicIfNull(onceJob); - String status = onceJob.getStatus(); - panicIfNull(status); - jobStatus = LinkisJobStatus.convertFromNodeStatusString(onceJob.getStatus()); - } - - public LinkisJobStatus getJobStatus() { - return this.jobStatus; - } - - public void setJobStatus(JobStatus jobStatus) { - this.jobStatus = (LinkisJobStatus) jobStatus; - } - - public void waitForComplete() { - panicIfNull(onceJob); - onceJob.waitForCompleted(); - } - - public void queryJobLogOneIteration() { - panicIfNull(onceJob); - updateStatus(); - if (logOperator == null) { - logOperator = - (EngineConnLogOperator) 
onceJob.getOperator(EngineConnLogOperator.OPERATOR_NAME()); - logOperator.setECMServiceInstance( - ((SubmittableSimpleOnceJob) onceJob).getECMServiceInstance()); - logOperator.setEngineConnType(engineTypeForECM); - // logOperator.setPageSize(OnceJobConstants.MAX_LOG_SIZE_ONCE); - logOperator.setIgnoreKeywords(OnceJobConstants.LOG_IGNORE_KEYWORDS); - } - EngineConnLogs logs = - (EngineConnLogs) logOperator.apply(); // for some reason we have to add type conversion, - // otherwise mvn testCompile fails - StringBuilder logBuilder = new StringBuilder(); - for (String log : logs.logs()) { - logBuilder.append(log).append(System.lineSeparator()); - } - appendLog(logBuilder.toString()); - if ((logs.logs() == null || logs.logs().size() <= 0) && jobStatus.isJobFinishedState()) { - setHasNextLogLine(false); - } - // System.out.println(logs.logs().size()); - } - - public void registerincLogListener(LinkisClientListener observer) { - this.logstartEvent.register(observer); - } - - public void notifyLogListener() { - if (this.logstartEvent.isRegistered()) { - logstartEvent.notifyObserver(logstartEvent, this); - } - } - - public boolean isIncLogMode() { - return this.incLogMode; - } - - public void setIncLogMode(boolean incLogMode) { - this.incLogMode = incLogMode; - } - - public String consumeLog() { - List logs = new LinkedList<>(); - this.logBuffer.drainTo(logs, this.logBuffer.size()); - StringBuilder tmp = new StringBuilder(); - for (String str : logs) { - tmp.append(str); - } - return tmp.toString(); - } - - public void appendLog(String log) { - this.logBuffer.add(log); - } - - public final String getLogPath() { - return null; - } - - public final void setLogPath(String logPath) { - return; - } - - public Integer getNextLogLineIdx() { - return null; - } - - public void setNextLogLineIdx(Integer nextLogLineIdx) { - return; - } - - public Boolean hasNextLogLine() { - return hasNextLogLine; - } - - public void setHasNextLogLine(Boolean hasNextLogLine) { - this.hasNextLogLine = 
hasNextLogLine; - } - - public List consumeResultContent() { - List ret = new LinkedList<>(); - resultContent.drainTo(ret, resultContent.size()); - return ret; - } - - public void appendResultContent(LinkisResultSet resultContent) { - this.resultContent.add(resultContent); - } - - public Boolean hasNextResultPage() { - return hasNextResultPage; - } - - public void setHasNextResultPage(Boolean hasNextResultPage) { - this.hasNextResultPage = hasNextResultPage; - } - - public final String getResultLocation() { - return resultLocation; - } - - public final void setResultLocation(String resultLocation) { - this.resultLocation = resultLocation; - } - - public String[] getResultSetPaths() { - return resultSetPaths; - } - - public final void setResultSetPaths(String[] resultSetPaths) { - this.resultSetPaths = resultSetPaths; - } - - public void sendLogFin() { - if (this.logFinEvent != null && this.logFinEvent.isRegistered()) { - this.logFinEvent.notifyObserver(resultFinEvent, null); - } - } - - public boolean logFinReceived() { - return this.logFinListener.isTriggered(); - } - - public void sendResultFin() { - if (this.resultFinEvent != null && this.resultFinEvent.isRegistered()) { - this.resultFinEvent.notifyObserver(resultFinEvent, null); - } - } - - public boolean resultFinReceived() { - return this.resultFinListener.isTriggered(); - } - - @Override - public String getMessage() { - return message; - } - - @Override - public void setMessage(String message) { - this.message = message; - } - - @Override - public Exception getException() { - return exception; - } - - @Override - public void setException(Exception e) { - this.exception = e; - } - - @Override - public String getExecID() { - return getJobID(); - } // No Need - - @Override - public float getJobProgress() { - return 0; - } - - @Override - public Integer getErrCode() { - return null; - } - - @Override - public String getErrDesc() { - return null; - } - - @Override - public boolean isSuccess() { - return success; 
- } - - @Override - public void setSuccess(boolean success) { - this.success = success; - } - - @Override - public void updateByOperResult(LinkisOperResultAdapter adapter) { - // No need - } - - @Override - public LinkisLogData clone() throws CloneNotSupportedException { - throw new CloneNotSupportedException(); - // return null; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisJobManDesc.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisJobManDesc.java deleted file mode 100644 index da4f5d1c471..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisJobManDesc.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.desc; - -import org.apache.linkis.cli.common.entity.job.JobDescription; - -import java.util.Map; - -public class LinkisJobManDesc implements JobDescription { - private String jobId; - private String user; - - private Map params; - - public String getJobID() { - return jobId; - } - - public void setJobId(String jobId) { - this.jobId = jobId; - } - - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - public Map getParams() { - return params; - } - - public void setParams(Map params) { - this.params = params; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisOnceDesc.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisOnceDesc.java deleted file mode 100644 index a98d99383e7..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisOnceDesc.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.desc; - -import org.apache.linkis.cli.common.entity.job.JobDescription; -import org.apache.linkis.cli.common.entity.var.VarAccess; - -import java.util.Map; - -public class LinkisOnceDesc implements JobDescription { - - private VarAccess stdVarAccess; - private VarAccess sysVarAccess; - - private String submitUser; - private String proxyUser; - private String creator; - private Map executionMap; - private Map paramConfMap; - private Map paramRunTimeMap; - private Map paramVarsMap; - private Map labelMap; - private Map sourceMap; - private Map jobContentMap; - - public VarAccess getStdVarAccess() { - return stdVarAccess; - } - - public void setStdVarAccess(VarAccess stdVarAccess) { - this.stdVarAccess = stdVarAccess; - } - - public VarAccess getSysVarAccess() { - return sysVarAccess; - } - - public void setSysVarAccess(VarAccess sysVarAccess) { - this.sysVarAccess = sysVarAccess; - } - - public String getSubmitUser() { - return submitUser; - } - - public void setSubmitUser(String submitUser) { - this.submitUser = submitUser; - } - - public String getProxyUser() { - return proxyUser; - } - - public void setProxyUser(String proxyUser) { - this.proxyUser = proxyUser; - } - - public String getCreator() { - return creator; - } - - public void setCreator(String creator) { - this.creator = creator; - } - - public Map getParamConfMap() { - return paramConfMap; - } - - public void setParamConfMap(Map paramConfMap) { - this.paramConfMap = paramConfMap; - } - - public Map getParamRunTimeMap() { - return paramRunTimeMap; - } - - public void setParamRunTimeMap(Map paramRunTimeMap) { - this.paramRunTimeMap = paramRunTimeMap; - } - - public Map getExecutionMap() { - return executionMap; - } - - public void setExecutionMap(Map executionMap) { - this.executionMap = executionMap; - } - - public Map getParamVarsMap() 
{ - return paramVarsMap; - } - - public void setParamVarsMap(Map paramVarsMap) { - this.paramVarsMap = paramVarsMap; - } - - public Map getSourceMap() { - return sourceMap; - } - - public void setSourceMap(Map sourceMap) { - this.sourceMap = sourceMap; - } - - public Map getLabelMap() { - return labelMap; - } - - public void setLabelMap(Map labelMap) { - this.labelMap = labelMap; - } - - public Map getJobContentMap() { - return jobContentMap; - } - - public void setJobContentMap(Map jobContentMap) { - this.jobContentMap = jobContentMap; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisSubmitDesc.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisSubmitDesc.java deleted file mode 100644 index 49cdd1a547d..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisSubmitDesc.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.desc; - -import org.apache.linkis.cli.common.entity.job.JobDescription; - -import java.util.Map; - -public class LinkisSubmitDesc implements JobDescription { - private String submitUser; - private String proxyUser; - private String creator; - private Map executionMap; - private Map paramConfMap; - private Map paramRunTimeMap; - private Map paramVarsMap; - private Map labelMap; - private Map sourceMap; - - public String getSubmitUser() { - return submitUser; - } - - public void setSubmitUser(String submitUser) { - this.submitUser = submitUser; - } - - public String getProxyUser() { - return proxyUser; - } - - public void setProxyUser(String proxyUser) { - this.proxyUser = proxyUser; - } - - public String getCreator() { - return creator; - } - - public void setCreator(String creator) { - this.creator = creator; - } - - public Map getParamConfMap() { - return paramConfMap; - } - - public void setParamConfMap(Map paramConfMap) { - this.paramConfMap = paramConfMap; - } - - public Map getParamRunTimeMap() { - return paramRunTimeMap; - } - - public void setParamRunTimeMap(Map paramRunTimeMap) { - this.paramRunTimeMap = paramRunTimeMap; - } - - public Map getExecutionMap() { - return executionMap; - } - - public void setExecutionMap(Map executionMap) { - this.executionMap = executionMap; - } - - public Map getParamVarsMap() { - return paramVarsMap; - } - - public void setParamVarsMap(Map paramVarsMap) { - this.paramVarsMap = paramVarsMap; - } - - public Map getSourceMap() { - return sourceMap; - } - - public void setSourceMap(Map sourceMap) { - this.sourceMap = sourceMap; - } - - public Map getLabelMap() { - return labelMap; - } - - public void setLabelMap(Map labelMap) { - this.labelMap = labelMap; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubType.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubType.java deleted file mode 100644 index 5df4b07976b..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubType.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.subtype; - -import org.apache.linkis.cli.common.entity.job.JobSubType; - -public enum LinkisManSubType implements JobSubType { - KILL("kill"), - LOG("log"), - DESC("desc"), - STATUS("status"), - LIST("list"), - RESULT("result"); - - private String name; - - LinkisManSubType(String name) { - this.name = name; - } - - @Override - public String getName() { - return this.name; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubType.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubType.java deleted file mode 100644 index 49d3d5ea91b..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubType.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.subtype; - -import org.apache.linkis.cli.common.entity.job.JobSubType; - -public enum LinkisSubmitSubType implements JobSubType { - SUBMIT("submit"); - - private String name; - - LinkisSubmitSubType(String name) { - this.name = name; - } - - @Override - public String getName() { - return name; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisManageValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisManageValidator.java deleted file mode 100644 index 519a2ee85cf..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisManageValidator.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.validate; - -import org.apache.linkis.cli.application.interactor.job.LinkisManageJob; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisJobManDesc; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import org.apache.commons.lang3.StringUtils; - -public class LinkisManageValidator implements Validator { - @Override - public void doValidation(Object input) throws LinkisClientRuntimeException { - if (!(input instanceof LinkisManageJob)) { - throw new ValidateException( - "VLD0007", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Input of LinkisSubmitValidator is not instance of LinkisManageJob. Type: " - + input.getClass().getCanonicalName()); - } - boolean ok = true; - StringBuilder reasonSb = new StringBuilder(); - LinkisJobManDesc desc = ((LinkisManageJob) input).getJobDesc(); - if (StringUtils.isBlank(desc.getJobID())) { - reasonSb.append("jobId cannot be empty or blank").append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank(desc.getUser())) { - reasonSb.append("user cannot be empty or blank").append(System.lineSeparator()); - ok = false; - } - if (!ok) { - throw new ValidateException( - "VLD0008", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "LinkisJobMan validation failed. 
Reason: " + reasonSb.toString()); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisOnceSubmitValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisOnceSubmitValidator.java deleted file mode 100644 index fba6644700c..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisOnceSubmitValidator.java +++ /dev/null @@ -1,290 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.validate; - -import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisOnceJob; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisOnceDesc; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import org.apache.commons.lang3.StringUtils; - -import java.util.Map; - -public class LinkisOnceSubmitValidator implements Validator { - @Override - public void doValidation(Object input) throws LinkisClientRuntimeException { - if (!(input instanceof LinkisOnceJob)) { - throw new ValidateException( - "VLD0007", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Input of LinkisSubmitValidator is not instance of LinkisSubmitJob. 
Type: " - + input.getClass().getCanonicalName()); - } - boolean ok = true; - StringBuilder reasonSb = new StringBuilder(); - LinkisOnceDesc submitDesc = (LinkisOnceDesc) ((LinkisOnceJob) input).getJobDesc(); - if (StringUtils.isBlank(submitDesc.getSubmitUser())) { - reasonSb.append("Submit User cannot be empty or blank").append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank(submitDesc.getProxyUser())) { - reasonSb - .append("proxy(execute) User cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getLabelMap() == null) { - reasonSb.append("labelMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getExecutionMap() == null) { - reasonSb.append("ExecutionMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getSourceMap() == null) { - reasonSb.append("SourceMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getParamConfMap() == null) { - reasonSb.append("startupMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getParamVarsMap() == null) { - reasonSb.append("variableMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getParamRunTimeMap() == null) { - reasonSb.append("runTimeMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getJobContentMap() == null) { - reasonSb.append("jobContentMap cannot be null").append(System.lineSeparator()); - ok = false; - } - for (Map.Entry entry : submitDesc.getExecutionMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("ExecutionMap key cannot contains space character. 
key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - } - for (Map.Entry entry : submitDesc.getLabelMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("LabelMap key cannot contains space character. key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - Object val = entry.getValue(); - if (val instanceof String) { - if (StringUtils.contains((String) val, " ")) { - reasonSb - .append("LabelMap value cannot contains space character. key: ") - .append(entry.getKey()) - .append("value: ") - .append(val) - .append(System.lineSeparator()); - ok = false; - } - } - } - for (Map.Entry entry : submitDesc.getParamConfMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("startUpMap key cannot contains space character. key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - // Object val = entry.getValue(); - // if (val instanceof String) { - // if (StringUtils.contains((String) val, " ")) { - // reasonSb.append("startUpMap value cannot contains space character. - // key: ") - // .append(entry.getKey()).append("value: ").append(val) - // .append(System.lineSeparator()); - // ok = false; - // } - // } - } - // for (Map.Entry entry : linkisJob.getParamRunTimeMap().entrySet()) - // { - // if (StringUtils.contains(entry.getKey(), " ")) { - // reasonSb.append("runtimeMap key cannot contains space character. key: - // ").append(entry.getKey()).append(System.lineSeparator()); - // ok = false; - // } - // Object val = entry.getValue(); - // if (val instanceof String) { - // if (StringUtils.contains((String) val, " ")) { - // reasonSb.append("runtimeMap value cannot contains space character. 
- // key: ") - // .append(entry.getKey()).append("value: ").append(val) - // .append(System.lineSeparator()); - // ok = false; - // } - // } - // } - for (Map.Entry entry : submitDesc.getParamVarsMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("variablesMap key cannot contains space character. key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - Object val = entry.getValue(); - // if (val instanceof String) { - // if (StringUtils.contains((String) val, " ")) { - // reasonSb.append("variablesMap value cannot contains space - // character. key: ") - // .append(entry.getKey()).append("value: ").append(val) - // .append(System.lineSeparator()); - // ok = false; - // } - // } - } - for (Map.Entry entry : submitDesc.getSourceMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("sourceMap key cannot contains space character. key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - Object val = entry.getValue(); - if (val instanceof String) { - if (StringUtils.contains((String) val, " ")) { - reasonSb - .append("sourceMap value cannot contains space character. key: ") - .append(entry.getKey()) - .append("value: ") - .append(val) - .append(System.lineSeparator()); - ok = false; - } - } - } - for (Map.Entry entry : submitDesc.getJobContentMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("jobContentMap key cannot contains space character. key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - // Object val = entry.getValue(); - // if (val instanceof String) { - // if (StringUtils.contains((String) val, " ")) { - // reasonSb.append("jobContentMap value cannot contains space - // character. 
key: ") - // .append(entry.getKey()).append("value: ").append(val) - // .append(System.lineSeparator()); - // ok = false; - // } - // } - } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE))) { - reasonSb - .append(LinkisKeys.KEY_ENGINETYPE) - .append(" cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_CODETYPE))) { - reasonSb - .append(LinkisKeys.KEY_CODETYPE) - .append(" cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank((String) submitDesc.getSourceMap().get(LinkisKeys.KEY_SCRIPT_PATH))) { - reasonSb - .append(LinkisKeys.KEY_SCRIPT_PATH) - .append(" cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank((String) submitDesc.getExecutionMap().get(LinkisKeys.KEY_CODE)) - && StringUtils.indexOfIgnoreCase( - (String) submitDesc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE), "sqoop") - == -1) { - reasonSb - .append(LinkisKeys.KEY_CODE) - .append(" cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR))) { - reasonSb - .append(LinkisKeys.KEY_USER_CREATOR) - .append(" cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } else { - String userCreator = (String) submitDesc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR); - if (StringUtils.indexOf(submitDesc.getProxyUser(), "-") != -1) { - reasonSb - .append("\'proxyUser\' should not contain special character \'-\'") - .append(System.lineSeparator()); - ok = false; - } else { - int idx = StringUtils.indexOf(userCreator, "-"); - if (idx == -1) { - reasonSb - .append(LinkisKeys.KEY_USER_CREATOR) - .append("should contain exactly one character \'-\'") - .append(System.lineSeparator()); - ok = false; - } else { - String user = 
StringUtils.substring(userCreator, 0, idx); - String creator = StringUtils.substring(userCreator, idx + 1); - if (StringUtils.isBlank(user) || StringUtils.isBlank(creator)) { - reasonSb.append("user or creator should not be blank").append(System.lineSeparator()); - ok = false; - } else { - // String forBiddenChars = "~!$%^&*-,./?|{}[]:;'()+="; - String forBiddenChars = "-"; - if (StringUtils.containsAny(creator, forBiddenChars)) { - reasonSb - .append("\'creator\' should not contain any special characters except \'_\'") - .append(System.lineSeparator()); - ok = false; - } - } - } - } - } - if (!ok) { - throw new ValidateException( - "VLD0008", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "LinkisJob validation failed. Reason: " + reasonSb.toString()); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisSubmitValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisSubmitValidator.java deleted file mode 100644 index fa0f31dc1ef..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisSubmitValidator.java +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.validate; - -import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisSubmitJob; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisSubmitDesc; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import org.apache.commons.lang3.StringUtils; - -import java.util.Map; - -public class LinkisSubmitValidator implements Validator { - @Override - public void doValidation(Object input) throws LinkisClientRuntimeException { - if (!(input instanceof LinkisSubmitJob)) { - throw new ValidateException( - "VLD0007", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Input of LinkisSubmitValidator is not instance of LinkisSubmitJob. 
Type: " - + input.getClass().getCanonicalName()); - } - boolean ok = true; - StringBuilder reasonSb = new StringBuilder(); - LinkisSubmitDesc submitDesc = ((LinkisSubmitJob) input).getJobDesc(); - if (StringUtils.isBlank(submitDesc.getSubmitUser())) { - reasonSb.append("Submit User cannot be empty or blank").append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank(submitDesc.getProxyUser())) { - reasonSb - .append("proxy(execute) User cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getLabelMap() == null) { - reasonSb.append("labelMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getExecutionMap() == null) { - reasonSb.append("ExecutionMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getSourceMap() == null) { - reasonSb.append("SourceMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getParamConfMap() == null) { - reasonSb.append("startupMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getParamVarsMap() == null) { - reasonSb.append("variableMap cannot be null").append(System.lineSeparator()); - ok = false; - } - if (submitDesc.getParamRunTimeMap() == null) { - reasonSb.append("runTimeMap cannot be null").append(System.lineSeparator()); - ok = false; - } - for (Map.Entry entry : submitDesc.getExecutionMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("ExecutionMap key cannot contains space character. key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - } - for (Map.Entry entry : submitDesc.getLabelMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("LabelMap key cannot contains space character. 
key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - Object val = entry.getValue(); - if (val instanceof String) { - if (StringUtils.contains((String) val, " ")) { - reasonSb - .append("LabelMap value cannot contains space character. key: ") - .append(entry.getKey()) - .append("value: ") - .append(val) - .append(System.lineSeparator()); - ok = false; - } - } - } - for (Map.Entry entry : submitDesc.getParamConfMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("startUpMap key cannot contains space character. key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - // Object val = entry.getValue(); - // if (val instanceof String) { - // if (StringUtils.contains((String) val, " ")) { - // reasonSb.append("startUpMap value cannot contains space character. - // key: ") - // .append(entry.getKey()).append("value: ").append(val) - // .append(System.lineSeparator()); - // ok = false; - // } - // } - } - // for (Map.Entry entry : linkisJob.getParamRunTimeMap().entrySet()) - // { - // if (StringUtils.contains(entry.getKey(), " ")) { - // reasonSb.append("runtimeMap key cannot contains space character. key: - // ").append(entry.getKey()).append(System.lineSeparator()); - // ok = false; - // } - // Object val = entry.getValue(); - // if (val instanceof String) { - // if (StringUtils.contains((String) val, " ")) { - // reasonSb.append("runtimeMap value cannot contains space character. - // key: ") - // .append(entry.getKey()).append("value: ").append(val) - // .append(System.lineSeparator()); - // ok = false; - // } - // } - // } - for (Map.Entry entry : submitDesc.getParamVarsMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("variablesMap key cannot contains space character. 
key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - Object val = entry.getValue(); - // if (val instanceof String) { - // if (StringUtils.contains((String) val, " ")) { - // reasonSb.append("variablesMap value cannot contains space - // character. key: ") - // .append(entry.getKey()).append("value: ").append(val) - // .append(System.lineSeparator()); - // ok = false; - // } - // } - } - for (Map.Entry entry : submitDesc.getSourceMap().entrySet()) { - if (StringUtils.contains(entry.getKey(), " ")) { - reasonSb - .append("sourceMap key cannot contains space character. key: ") - .append(entry.getKey()) - .append(System.lineSeparator()); - ok = false; - } - Object val = entry.getValue(); - if (val instanceof String) { - if (StringUtils.contains((String) val, " ")) { - reasonSb - .append("sourceMap value cannot contains space character. key: ") - .append(entry.getKey()) - .append("value: ") - .append(val) - .append(System.lineSeparator()); - ok = false; - } - } - } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE))) { - reasonSb - .append(LinkisKeys.KEY_ENGINETYPE) - .append(" cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_CODETYPE))) { - reasonSb - .append(LinkisKeys.KEY_CODETYPE) - .append(" cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank((String) submitDesc.getSourceMap().get(LinkisKeys.KEY_SCRIPT_PATH))) { - reasonSb - .append(LinkisKeys.KEY_SCRIPT_PATH) - .append(" cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank((String) submitDesc.getExecutionMap().get(LinkisKeys.KEY_CODE)) - && StringUtils.indexOfIgnoreCase( - (String) submitDesc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE), "sqoop") - == -1) { - reasonSb - .append(LinkisKeys.KEY_CODE) - .append(" cannot be empty 
or blank") - .append(System.lineSeparator()); - ok = false; - } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR))) { - reasonSb - .append(LinkisKeys.KEY_USER_CREATOR) - .append(" cannot be empty or blank") - .append(System.lineSeparator()); - ok = false; - } else { - String userCreator = (String) submitDesc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR); - if (StringUtils.indexOf(submitDesc.getProxyUser(), "-") != -1) { - reasonSb - .append("\'proxyUser\' should not contain special character \'-\'") - .append(System.lineSeparator()); - ok = false; - } else { - int idx = StringUtils.indexOf(userCreator, "-"); - if (idx == -1) { - reasonSb - .append(LinkisKeys.KEY_USER_CREATOR) - .append("should contain exactly one character \'-\'") - .append(System.lineSeparator()); - ok = false; - } else { - String user = StringUtils.substring(userCreator, 0, idx); - String creator = StringUtils.substring(userCreator, idx + 1); - if (StringUtils.isBlank(user) || StringUtils.isBlank(creator)) { - reasonSb.append("user or creator should not be blank").append(System.lineSeparator()); - ok = false; - } else { - // String forBiddenChars = "~!$%^&*-,./?|{}[]:;'()+="; - String forBiddenChars = "-"; - if (StringUtils.containsAny(creator, forBiddenChars)) { - reasonSb - .append("\'creator\' should not contain any special characters except \'_\'") - .append(System.lineSeparator()); - ok = false; - } - } - } - } - } - if (!ok) { - throw new ValidateException( - "VLD0008", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "LinkisJob validation failed. 
Reason: " + reasonSb.toString()); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/TriggerEvent.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/TriggerEvent.java deleted file mode 100644 index dada521b590..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/TriggerEvent.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.observer.event; - -public class TriggerEvent extends SingleObserverEvent {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/listener/TriggerEventListener.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/listener/TriggerEventListener.java deleted file mode 100644 index 8811441d02b..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/listener/TriggerEventListener.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.observer.listener; - -import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; - -import java.util.concurrent.atomic.AtomicBoolean; - -public class TriggerEventListener implements LinkisClientListener { - AtomicBoolean atomicFlag = new AtomicBoolean(false); - - @Override - public void update(LinkisClientEvent event, Object msg) { - atomicFlag.compareAndSet(false, true); - } - - public Boolean isTriggered() { - return atomicFlag.get(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/OperatorUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/OperatorUtils.java deleted file mode 100644 index 767d44aea11..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/OperatorUtils.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.operator; - -public class OperatorUtils { - public static int getNumOfLines(String str) { - if (str == null || str.length() == 0) { - return 0; - } - int lines = 1; - int len = str.length(); - for (int pos = 0; pos < len; pos++) { - char c = str.charAt(pos); - if (c == '\r') { - lines++; - if (pos + 1 < len && str.charAt(pos + 1) == '\n') { - pos++; - } - } else if (c == '\n') { - lines++; - } - } - return lines; - } - - public static int getFirstIndexSkippingLines(String str, Integer lines) { - if (str == null || str.length() == 0 || lines < 0) { - return -1; - } - if (lines == 0) { - return 0; - } - - int curLineIdx = 0; - int len = str.length(); - for (int pos = 0; pos < len; pos++) { - char c = str.charAt(pos); - if (c == '\r') { - curLineIdx++; - if (pos + 1 < len && str.charAt(pos + 1) == '\n') { - pos++; - } - } else if (c == '\n') { - curLineIdx++; - } else { - continue; - } - - if (curLineIdx >= lines) { - return pos + 1; - } - } - return -1; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOperator.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOperator.java deleted file mode 100644 index 87c49cd141e..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOperator.java +++ /dev/null @@ -1,745 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.operator.ujes; - -import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisSubmitDesc; -import org.apache.linkis.cli.application.operator.ujes.result.OpenLogResult2; -import org.apache.linkis.cli.application.operator.ujes.result.ResultSetResult2; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.common.exception.LinkisException; -import org.apache.linkis.ujes.client.UJESClient; -import org.apache.linkis.ujes.client.request.JobSubmitAction; -import org.apache.linkis.ujes.client.request.OpenLogAction; -import org.apache.linkis.ujes.client.request.ResultSetAction; -import org.apache.linkis.ujes.client.response.*; - -import org.apache.commons.lang3.StringUtils; - -import java.io.IOException; -import java.text.MessageFormat; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** Based on UjesClient */ -public class LinkisJobOperator implements JobOperator { - protected UJESClient 
client; - private Logger logger = LoggerFactory.getLogger(LinkisJobOperator.class); - private String serverUrl; - - public UJESClient getUJESClient() { - return client; - } - - public void setUJESClient(UJESClient client) { - this.client = client; - } - - public String getServerUrl() { - return serverUrl; - } - - public void setServerUrl(String serverUrl) { - this.serverUrl = serverUrl; - } - - public void close() { - if (null != client) { - try { - client.close(); - } catch (IOException e) { - logger.error("Close error. " + e.getMessage(), e); - } - } - } - - public void checkInit() throws LinkisClientRuntimeException { - if (client == null) { - throw new LinkisClientExecutionException( - "EXE0011", ErrorLevel.ERROR, CommonErrMsg.ExecutionInitErr, "UjesClientDriver is null"); - } - } - - /** - * submit Job but does not query for progress - * - * @param - * @return - */ - public LinkisOperResultAdapter submit(LinkisSubmitDesc jobDesc) - throws LinkisClientRuntimeException { - checkInit(); - JobSubmitResult jobSubmitResult; - try { - JobSubmitAction jobSubmitAction = - JobSubmitAction.builder() - .setUser(jobDesc.getSubmitUser()) - .addExecuteUser(jobDesc.getProxyUser()) - .setExecutionContent(jobDesc.getExecutionMap()) - .addExecuteCode((String) jobDesc.getExecutionMap().get(LinkisKeys.KEY_CODE)) - .setStartupParams(jobDesc.getParamConfMap()) - .setRuntimeParams(jobDesc.getParamRunTimeMap()) - .setVariableMap(jobDesc.getParamVarsMap()) - .setLabels(jobDesc.getLabelMap()) - .setSource(jobDesc.getSourceMap()) - .build(); - logger.info("Request info to Linkis: \n{}", Utils.GSON.toJson(jobSubmitAction)); - - /* Old API */ - // JobExecuteAction jobExecuteAction = JobExecuteAction.builder() - // .setCreator((String) - // linkisJob.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR)) - // .setUser(linkisJob.getSubmitUser()) - // .addExecuteCode((String) - // linkisJob.getExecutionMap().get(LinkisKeys.KEY_CODE)) - // .setEngineTypeStr((String) - // 
linkisJob.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE)) - // .setRunTypeStr((String) - // linkisJob.getExecutionMap().get(LinkisKeys.KEY_CODETYPE)) - // .setStartupParams(linkisJob.getParamConfMap()) - // .setVariableMap(linkisJob.getParamVarsMap()) - // .setScriptPath((String) - // linkisJob.getSourceMap().get(LinkisKeys.KEY_SCRIPT_PATH)) - // .build(); - // logger.debug("Request info to Linkis Old: \n{}", - // Utils.GSON.toJson(jobExecuteAction)); - // jobExecuteResult = client.execute(jobExecuteAction); - - jobSubmitResult = client.submit(jobSubmitAction); - logger.info("Response info from Linkis: \n{}", Utils.GSON.toJson(jobSubmitAction)); - - } catch (Exception e) { - // must throw if exception - throw new LinkisClientExecutionException( - "EXE0011", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "Failed to submit job", e); - } - - if (jobSubmitResult == null - || 0 != jobSubmitResult.getStatus() - || StringUtils.isBlank(jobSubmitResult.getTaskID())) { - String reason; - if (jobSubmitResult == null) { - reason = "JobSubmitResult is null"; - } else if (0 != jobSubmitResult.getStatus()) { - reason = "server returns non-zero status-code"; - } else { - reason = "server returns blank TaskId"; - } - String msg = MessageFormat.format("Failed to submit job, Reason: {0}", reason); - throw new LinkisClientExecutionException( - "EXE0012", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } - - return new UJESResultAdapter(jobSubmitResult); - } - - /** - * loop and get job info until we success and get a valid execID - * - * @param user - * @param taskID - * @return - */ - public LinkisOperResultAdapter queryJobInfo(String user, String taskID) - throws LinkisClientRuntimeException { - if (user == null || taskID == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - return new UJESResultAdapter(queryJobInfoInternal(user, taskID)); - } - - public LinkisOperResultAdapter 
queryJobStatus(String user, String taskID, String execID) - throws LinkisClientRuntimeException { - if (user == null || taskID == null || execID == null) { - throw new LinkisClientExecutionException( - "EXE0036", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "user or jobID or execID is null"); - } - checkInit(); - JobExecuteResult executeResult = new JobExecuteResult(); - executeResult.setTaskID(taskID); - executeResult.setUser(user); - executeResult.setExecID(execID); - JobStatusResult jobStatusResult = null; - int retryTime = 0; - final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; - - while (retryTime++ < MAX_RETRY_TIME) { - try { - jobStatusResult = client.status(executeResult); - logger.debug("job-status: " + Utils.GSON.toJson(jobStatusResult)); - if (jobStatusResult == null || 0 != jobStatusResult.getStatus()) { - String reason; - if (jobStatusResult == null) { - reason = "jobStatusResult is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Get job status failed. retry time : {0}/{1}. taskID={0}, Reason: {1}", - retryTime, MAX_RETRY_TIME, taskID, reason); - - logger.debug( - "", - new LinkisClientExecutionException( - "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg)); - } else { - break; - } - } catch (Exception e) { - String msg = - MessageFormat.format( - "Get job status failed. 
retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); - if (e instanceof LinkisException) { - msg += " " + e.toString(); - } - logger.warn(msg, e); - if (retryTime >= MAX_RETRY_TIME) { - throw new LinkisClientExecutionException( - "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg, e); - } - } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); - } - if (jobStatusResult == null || 0 != jobStatusResult.getStatus()) { - String reason; - if (jobStatusResult == null) { - reason = "jobStatusResult is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Get status failed. Retry exhausted. taskID={0}, Reason: {1}", taskID, reason); - throw new LinkisClientExecutionException( - "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } - return new UJESResultAdapter(jobStatusResult); - } - - private JobInfoResult queryJobInfoInternal(String user, String taskID) - throws LinkisClientRuntimeException { - checkInit(); - JobExecuteResult executeResult = new JobExecuteResult(); - executeResult.setTaskID(taskID); - executeResult.setUser(user); - JobInfoResult jobInfoResult = null; - int retryTime = 0; - final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; - - while (retryTime++ < MAX_RETRY_TIME) { - try { - jobInfoResult = client.getJobInfo(executeResult); - logger.debug("job-info: " + Utils.GSON.toJson(jobInfoResult)); - if (jobInfoResult == null || 0 != jobInfoResult.getStatus()) { - String reason; - if (jobInfoResult == null) { - reason = "JobInfoResult is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Get job info failed. retry time : {0}/{1}. 
taskID={0}, Reason: {1}", - retryTime, MAX_RETRY_TIME, taskID, reason); - - logger.debug( - "", - new LinkisClientExecutionException( - "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg)); - } else { - break; - } - } catch (Exception e) { - String msg = - MessageFormat.format( - "Get job info failed. retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); - if (e instanceof LinkisException) { - msg += " " + e.toString(); - } - logger.warn(msg, e); - if (retryTime >= MAX_RETRY_TIME) { - throw new LinkisClientExecutionException( - "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg, e); - } - } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); - } - if (jobInfoResult == null || 0 != jobInfoResult.getStatus()) { - String reason; - if (jobInfoResult == null) { - reason = "JobInfoResult is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Get info failed. Retry exhausted. taskID={0}, Reason: {1}", taskID, reason); - throw new LinkisClientExecutionException( - "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } - return jobInfoResult; - } - - public LinkisOperResultAdapter queryRunTimeLogFromLine( - String user, String taskID, String execID, int fromLine) throws LinkisClientRuntimeException { - checkInit(); - JobExecuteResult jobExecuteResult = new JobExecuteResult(); - jobExecuteResult.setUser(user); - jobExecuteResult.setTaskID(taskID); - jobExecuteResult.setExecID(execID); - - JobLogResult logResult = null; - int retryTime = 0; - final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; - - while (retryTime++ < MAX_RETRY_TIME) { - try { - logResult = client.log(jobExecuteResult, fromLine, UJESConstants.MAX_LOG_SIZE); - logger.debug("runtime-log-result:" + Utils.GSON.toJson(logResult)); - if (logResult == null || 0 != logResult.getStatus()) { - String reason; - if (logResult == null) { - reason = "JobLogResult is null"; - } else { - reason = "server 
returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Get log failed. retry time : {0}/{1}. taskID={2}. Reason: {3}", - retryTime, MAX_RETRY_TIME, taskID, reason); - logger.debug( - "", - new LinkisClientExecutionException( - "EXE0015", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg)); - } else { - break; - } - } catch (Exception e) { - String msg = - MessageFormat.format("Get log failed. Retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); - // logger.warn("", new LinkisClientExecutionException("EXE0016", - // ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg)); - if (retryTime >= MAX_RETRY_TIME) { - throw new LinkisClientExecutionException( - "EXE0016", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg, e); - } - } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); - } - if (logResult == null || 0 != logResult.getStatus()) { - String reason; - if (logResult == null) { - reason = "JobLogResult is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Get log failed. Retry exhausted. 
taskID={0}, Reason: {1}", taskID, reason); - // logger.warn("", new LinkisClientExecutionException("EXE0016", - // ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg)); - throw new LinkisClientExecutionException( - "EXE0016", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } - return new UJESResultAdapter(logResult); - } - - public LinkisOperResultAdapter queryPersistedLogFromLine( - String logPath, String user, String taskID, int fromLine) { - return new UJESResultAdapter( - new OpenLogResult2(queryPersistedLogInternal(logPath, user, taskID), fromLine)); - } - - private OpenLogResult queryPersistedLogInternal(String logPath, String user, String taskID) - throws LinkisClientRuntimeException { - checkInit(); - int retryCnt = 0; - final int MAX_RETRY_TIMES = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; - OpenLogResult openLogResult = null; - - while (retryCnt++ < MAX_RETRY_TIMES) { - try { - openLogResult = - client.openLog( - OpenLogAction.newBuilder().setLogPath(logPath).setProxyUser(user).build()); - logger.debug("persisted-log-result:" + Utils.GSON.toJson(openLogResult)); - if (openLogResult == null - || 0 != openLogResult.getStatus() - || StringUtils.isBlank(openLogResult.getLog()[UJESConstants.IDX_FOR_LOG_TYPE_ALL])) { - String reason; - if (openLogResult == null) { - reason = "OpenLogResult is null"; - } else if (0 != openLogResult.getStatus()) { - reason = "server returns non-zero status-code"; - } else { - reason = "server returns empty log"; - } - String msg = - MessageFormat.format( - "Get log from openLog failed. retry time : {0}/{1}. taskID={2}. Reason: {3}", - retryCnt, MAX_RETRY_TIMES, taskID, reason); - logger.debug(msg); - } else { - break; - } - } catch (Exception e) { - String msg = - MessageFormat.format( - "Get log from openLog failed. 
retry time : {0}/{1}", retryCnt, MAX_RETRY_TIMES); - if (e instanceof LinkisException) { - msg += " " + e.toString(); - } - logger.debug(msg, e); - if (retryCnt >= MAX_RETRY_TIMES) { - throw new LinkisClientExecutionException( - "EXE0017", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Get log from openLog failed. Retry exhausted. taskID=" + taskID, - e); - } - } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); - } - if (openLogResult == null - || 0 != openLogResult.getStatus() - || StringUtils.isBlank(openLogResult.getLog()[UJESConstants.IDX_FOR_LOG_TYPE_ALL])) { - String reason; - if (openLogResult == null) { - reason = "OpenLogResult is null"; - } else if (0 != openLogResult.getStatus()) { - reason = "server returns non-zero status-code"; - } else { - reason = "server returns empty log"; - } - String msg = - MessageFormat.format( - "Get log from openLog failed. retry time : {0}/{1}. taskID={2}. Reason: {3}", - retryCnt, MAX_RETRY_TIMES, taskID, reason); - logger.debug(msg); - if (retryCnt >= MAX_RETRY_TIMES) { - msg = - MessageFormat.format( - "Get log from openLog failed. Retry exhausted. 
taskID={0}, Reason: {1}", - taskID, reason); - throw new LinkisClientExecutionException( - "EXE0017", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } - } - return openLogResult; - } - - public UJESResultAdapter queryProgress(String user, String taskID, String execId) - throws LinkisClientRuntimeException { - checkInit(); - JobExecuteResult executeResult = new JobExecuteResult(); - executeResult.setTaskID(taskID); - executeResult.setUser(user); - executeResult.setExecID(execId); - - JobProgressResult jobProgressResult = null; - int retryTime = 0; - final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; - - while (retryTime++ < MAX_RETRY_TIME) { - try { - jobProgressResult = client.progress(executeResult); - if (jobProgressResult == null || 0 != jobProgressResult.getStatus()) { - String reason; - if (jobProgressResult == null) { - reason = "JobProgressResult is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Get progress failed. retry time : {0}/{1}. taskID={2}. Reason: {3}", - retryTime, MAX_RETRY_TIME, taskID, reason); - logger.debug(msg); - } else { - break; - } - } catch (Exception e) { - String msg = - MessageFormat.format( - "Get progress failed. retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); - if (e instanceof LinkisException) { - msg += " " + e.toString(); - } - logger.warn(msg, e); - if (retryTime >= MAX_RETRY_TIME) { - throw new LinkisClientExecutionException( - "EXE0019", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Get progress failed. Retry exhausted. taskID=" + taskID, - e); - } - } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); - } - - if (jobProgressResult == null || 0 != jobProgressResult.getStatus()) { - String reason; - if (jobProgressResult == null) { - reason = "JobProgressResult is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Get progress failed. 
Retry exhausted. taskID={0}, Reason: {1}", taskID, reason); - throw new LinkisClientExecutionException( - "EXE0020", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } - - return new UJESResultAdapter(jobProgressResult); - } - - public LinkisOperResultAdapter queryResultSetPaths( - String user, String taskID, String resultLocation) { - checkInit(); - - JobInfoResult jobInfoResult = queryJobInfoInternal(user, taskID); - if (null == jobInfoResult) { - String msg = "Get ResultSet Failed: Cannot get a valid jobInfo"; - logger.error(msg); - throw new LinkisClientExecutionException( - "EXE0021", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } - if (!jobInfoResult.isSucceed()) { - String msg = "Get ResultSet Failed: job Status is not \"Succeed\", ."; - throw new LinkisClientExecutionException( - "EXE0021", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } - - if (StringUtils.isBlank(jobInfoResult.getRequestPersistTask().getResultLocation())) { - // sometimes server-side does not return this - jobInfoResult.getRequestPersistTask().setResultLocation(resultLocation); - } - - if (StringUtils.isBlank(jobInfoResult.getRequestPersistTask().getResultLocation())) { - throw new LinkisClientExecutionException( - "EXE0021", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "ResultLocation is blank."); - } - - String[] resultSetArray = null; - - int retryTime = 0; - final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; - - while (retryTime++ < MAX_RETRY_TIME) { - try { - resultSetArray = jobInfoResult.getResultSetList(client); // this makes call to server - if (resultSetArray == null || 0 == resultSetArray.length) { - String reason; - if (resultSetArray == null) { - reason = "array is null"; - } else { - reason = "array length is zero"; - } - String msg = - MessageFormat.format( - "Get resultSetArray failed. retry time : {0}/{1}. 
taskID={2} Reason: {3}", - retryTime, MAX_RETRY_TIME, taskID, reason); - logger.debug(msg); - break; - } - } catch (Exception e) { - String msg = - MessageFormat.format( - "Get resultSetArray failed. retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); - if (e instanceof LinkisException) { - msg += " " + e.toString(); - } - logger.warn(msg, e); - if (retryTime >= MAX_RETRY_TIME) { - throw new LinkisClientExecutionException( - "EXE0022", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Get resultSetArray failed. Retry exhausted. taskID=" + taskID, - e); - } - } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); - } - if (resultSetArray == null || 0 == resultSetArray.length) { - String reason; - if (resultSetArray == null) { - reason = "array is null"; - } else { - reason = "array length is zero"; - } - String msg = - MessageFormat.format( - "Get resultSetArray failed. retry exhausted. taskID={0}. Reason: {1}", - taskID, reason); - logger.warn(msg); - } - return new UJESResultAdapter(resultSetArray); - } - - public LinkisOperResultAdapter queryResultSetGivenResultSetPath( - String[] resultSetPaths, int idxResultSet, String user, Integer page, Integer pageSize) { - checkInit(); - int retryTime = 0; - final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; - ResultSetResult result = null; - String resultSetPath = resultSetPaths[idxResultSet]; - while (retryTime++ < MAX_RETRY_TIME) { - try { - ResultSetAction action = - ResultSetAction.builder() - .setPath(resultSetPath) - .setUser(user) - .setPage(page) - .setPageSize(pageSize) - .build(); - result = client.resultSet(action); - logger.debug("resultset-result:" + Utils.GSON.toJson(result)); - if (result == null || 0 != result.getStatus()) { - String reason; - if (result == null) { - reason = "array is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Get resultSet failed. retry time : {0}/{1}. 
path={2}, Reason: {3}", - retryTime, MAX_RETRY_TIME, resultSetPath, reason); - logger.debug(msg); - } else { - break; - } - } catch (Exception e) { - String msg = - MessageFormat.format( - "Get resultSet failed. retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); - if (e instanceof LinkisException) { - msg += " " + e.toString(); - } - logger.warn(msg, e); - if (retryTime >= MAX_RETRY_TIME) { - throw new LinkisClientExecutionException( - "EXE0024", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Get resultSet failed. Retry exhausted. path=" + resultSetPath, - e); - } - } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); - } - if (result == null || 0 != result.getStatus()) { - String reason; - if (result == null) { - reason = "ResultSetResult is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Get resultSet failed. Retry exhausted. Path={0}, Reason: {1}", - resultSetPath, reason); - throw new LinkisClientExecutionException( - "EXE0024", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } - return new UJESResultAdapter(new ResultSetResult2(idxResultSet, result)); - } - - public LinkisOperResultAdapter kill(String user, String taskId, String execId) - throws LinkisClientRuntimeException { - checkInit(); - - int retryTime = 0; - final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; - - JobKillResult result = null; - - while (retryTime++ < MAX_RETRY_TIME) { - try { - JobExecuteResult killRequest = new JobExecuteResult(); - killRequest.setUser(user); - killRequest.setTaskID(taskId); - killRequest.setExecID(execId); - result = client.kill(killRequest); - logger.debug("job-kill-result:" + Utils.GSON.toJson(result)); - if (result == null || 0 != result.getStatus()) { - String reason; - if (result == null) { - reason = "result is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Kill job failed. 
retry time : {0}/{1}. taskId={2}, Reason: {3}", - retryTime, MAX_RETRY_TIME, taskId, reason); - logger.debug(msg); - } else { - break; - } - } catch (Exception e) { - String msg = - MessageFormat.format( - "Kill job failed. retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); - if (e instanceof LinkisException) { - msg += " " + e.toString(); - } - logger.warn(msg, e); - if (retryTime >= MAX_RETRY_TIME) { - throw new LinkisClientExecutionException( - "EXE0025", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Kill job failed. taskId={0} Retry exhausted.", - taskId, - e); - } - } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); - } - if (result == null || 0 != result.getStatus()) { - String reason; - if (result == null) { - reason = "result is null"; - } else { - reason = "server returns non-zero status-code"; - } - String msg = - MessageFormat.format( - "Kill job failed. Retry exhausted. taskId={0}, Reason: {1}", taskId, reason); - throw new LinkisClientExecutionException( - "EXE0025", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } - return new UJESResultAdapter(result); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperatorBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperatorBuilder.java deleted file mode 100644 index 466bb9b7c0a..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperatorBuilder.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.operator.ujes; - -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.core.operator.JobOperatorBuilder; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisOperatorBuilder extends JobOperatorBuilder { - private static Logger logger = LoggerFactory.getLogger(LinkisOperatorBuilder.class); - - @Override - public LinkisJobOperator build() { - - ((LinkisJobOperator) targetObj) - .setUJESClient(UJESClientFactory.getReusable(stdVarAccess, sysVarAccess)); - ((LinkisJobOperator) targetObj) - .setServerUrl(stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_GATEWAY_URL)); - - return (LinkisJobOperator) super.build(); - } - - @Override - protected LinkisJobOperator getTargetNewInstance() { - return new LinkisJobOperator(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java deleted file mode 100644 index 2767929e8d5..00000000000 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java +++ /dev/null @@ -1,220 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.operator.ujes; - -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.constants.LinkisConstants; -import org.apache.linkis.cli.application.interactor.validate.UJESContextValidator; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.BuilderException; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.httpclient.authentication.AuthenticationStrategy; -import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy; -import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy; -import org.apache.linkis.httpclient.dws.config.DWSClientConfig; -import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder; -import org.apache.linkis.ujes.client.UJESClient; -import org.apache.linkis.ujes.client.UJESClientImpl; - -import org.apache.commons.lang3.StringUtils; - -import java.util.concurrent.TimeUnit; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class UJESClientFactory { - private static Logger logger = LoggerFactory.getLogger(UJESClientFactory.class); - - private static UJESClient client; - - public static UJESClient getReusable(VarAccess stdVarAccess, VarAccess sysVarAccess) { - if (client == null) { - synchronized (UJESClientFactory.class) { - if (client == null) { - client = getNew(stdVarAccess, sysVarAccess); - } - } - } - return client; - } - - public static UJESClient getNew(VarAccess stdVarAccess, VarAccess sysVarAccess) { - try { - DWSClientConfig config = generateDWSClientConfig(stdVarAccess, sysVarAccess); - UJESClient ret = new 
UJESClientImpl(config); - logger.info("Linkis ujes client inited."); - return ret; - } catch (Exception e) { - throw new LinkisClientExecutionException( - "EXE0010", ErrorLevel.ERROR, CommonErrMsg.ExecutionInitErr, "Cannot init UJESClient", e); - } - } - - public static DWSClientConfig generateDWSClientConfig( - VarAccess stdVarAccess, VarAccess sysVarAccess) { - UJESClientContext context = generateContext(stdVarAccess, sysVarAccess); - try { - AuthenticationStrategy authenticationStrategy; - if (StringUtils.isBlank(context.getAuthenticationStrategyStr()) - || !LinkisConstants.AUTH_STRATEGY_TOKEN.equalsIgnoreCase( - context.getAuthenticationStrategyStr())) { - authenticationStrategy = - new StaticAuthenticationStrategy(); // this has to be newed here otherwise - // log-in fails for static - } else { - authenticationStrategy = new TokenAuthenticationStrategy(); - } - - DWSClientConfigBuilder builder = DWSClientConfigBuilder.newBuilder(); - DWSClientConfig config = - ((DWSClientConfigBuilder) - (builder - .addServerUrl(context.getGatewayUrl()) - .connectionTimeout(30000) - .discoveryEnabled(false) - .discoveryFrequency(1, TimeUnit.MINUTES) - .loadbalancerEnabled(false) - .maxConnectionSize(5) - .retryEnabled(false) - .readTimeout(context.getReadTimeoutMills()) - .setAuthenticationStrategy(authenticationStrategy) - .setAuthTokenKey(context.getTokenKey()) - .setAuthTokenValue(context.getTokenValue()))) - .setDWSVersion(context.getDwsVersion()) - .build(); - - logger.info("Linkis ujes client inited."); - return config; - } catch (Exception e) { - throw new LinkisClientExecutionException( - "EXE0010", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Cannot init DWSClientConfig", - e); - } - } - - public static DWSClientConfig generateDWSClientConfigForBML( - VarAccess stdVarAccess, VarAccess sysVarAccess) { - UJESClientContext context = generateContext(stdVarAccess, sysVarAccess); - try { - AuthenticationStrategy authenticationStrategy; - if 
(StringUtils.isBlank(context.getAuthenticationStrategyStr()) - || !LinkisConstants.AUTH_STRATEGY_TOKEN.equalsIgnoreCase( - context.getAuthenticationStrategyStr())) { - authenticationStrategy = - new StaticAuthenticationStrategy(); // this has to be newed here otherwise - // log-in fails for static - } else { - authenticationStrategy = new TokenAuthenticationStrategy(); - } - - DWSClientConfigBuilder builder = DWSClientConfigBuilder.newBuilder(); - DWSClientConfig config = - ((DWSClientConfigBuilder) - (builder - .addServerUrl(context.getGatewayUrl()) - .connectionTimeout(30000) - .discoveryEnabled(false) - .discoveryFrequency(1, TimeUnit.MINUTES) - .loadbalancerEnabled(false) - .maxConnectionSize(5) - .retryEnabled(false) - .readTimeout(context.getReadTimeoutMills()) - .setAuthenticationStrategy(authenticationStrategy) - .setAuthTokenKey("BML-AUTH") - .setAuthTokenValue("BML-AUTH"))) - .setDWSVersion(context.getDwsVersion()) - .build(); - - logger.info("Linkis ujes client inited."); - return config; - } catch (Exception e) { - throw new LinkisClientExecutionException( - "EXE0010", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Cannot init DWSClientConfig", - e); - } - } - - private static UJESClientContext generateContext(VarAccess stdVarAccess, VarAccess sysVarAccess) { - String gatewayUrl = stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_GATEWAY_URL); - if (StringUtils.isBlank(gatewayUrl)) { - throw new BuilderException( - "BLD0007", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Cannot build UjesClientDriverContext: gatewayUrl is empty"); - } - - String authKey = stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_TOKEN_KEY); - String authValue = stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_TOKEN_VALUE); - - String authenticationStrategy = - stdVarAccess.getVarOrDefault( - String.class, - AppKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, - LinkisConstants.AUTH_STRATEGY_STATIC); - - long connectionTimeout = - 
stdVarAccess.getVarOrDefault( - Long.class, AppKeys.UJESCLIENT_COMMON_CONNECTT_TIMEOUT, 30000L); - boolean discoveryEnabled = - stdVarAccess.getVarOrDefault( - Boolean.class, AppKeys.UJESCLIENT_COMMON_DISCOVERY_ENABLED, false); - boolean loadBalancerEnabled = - stdVarAccess.getVarOrDefault( - Boolean.class, AppKeys.UJESCLIENT_COMMON_LOADBALANCER_ENABLED, true); - int maxConnectionSize = - stdVarAccess.getVarOrDefault( - Integer.class, AppKeys.UJESCLIENT_COMMON_MAX_CONNECTION_SIZE, 5); - boolean retryEnabled = - stdVarAccess.getVarOrDefault(Boolean.class, AppKeys.UJESCLIENT_COMMON_RETRY_ENABLED, false); - long readTimeout = - stdVarAccess.getVarOrDefault(Long.class, AppKeys.UJESCLIENT_COMMON_READTIMEOUT, 30000L); - String dwsVersion = - stdVarAccess.getVarOrDefault(String.class, AppKeys.UJESCLIENT_COMMON_DWS_VERSION, "v1"); - - UJESClientContext context = new UJESClientContext(); - - context.setGatewayUrl(gatewayUrl); - context.setAuthenticationStrategyStr(authenticationStrategy); - context.setTokenKey(authKey); - context.setTokenValue(authValue); - context.setConnectionTimeout(connectionTimeout); - context.setDiscoveryEnabled(discoveryEnabled); - context.setLoadBalancerEnabled(loadBalancerEnabled); - context.setMaxConnectionSize(maxConnectionSize); - context.setRetryEnabled(retryEnabled); - context.setReadTimeoutMills(readTimeout); - context.setDwsVersion(dwsVersion); - - logger.info("==========UJES_CTX============\n" + Utils.GSON.toJson(context)); - Validator ctxValidator = new UJESContextValidator(); - ctxValidator.doValidation(context); - return context; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisLogPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisLogPresenter.java deleted file mode 100644 index 291d7314db0..00000000000 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisLogPresenter.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.present; - -import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.present.model.LinkisLogModel; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.display.DisplayOperFactory; -import org.apache.linkis.cli.core.present.display.DisplayOperator; -import org.apache.linkis.cli.core.present.display.data.StdoutDisplayData; -import org.apache.linkis.cli.core.utils.CommonUtils; - 
-import org.apache.commons.lang3.StringUtils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisLogPresenter implements Presenter, LinkisClientListener { - private static Logger logger = LoggerFactory.getLogger(LinkisLogPresenter.class); - - @Override - public void present(Model model, PresentWay presentWay) { - if (!(model instanceof LinkisLogModel)) { - throw new PresenterException( - "PST0001", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Input model for \"LinkisLogPresenter\" is not instance of \"LinkisJobIncLogModel\""); - } - LinkisLogModel logModel = (LinkisLogModel) model; - DisplayOperator displayOper = DisplayOperFactory.getDisplayOper(PresentModeImpl.STDOUT); - while (!logModel.logFinReceived()) { - String incLog = logModel.consumeLog(); - if (StringUtils.isNotEmpty(incLog)) { - displayOper.doOutput(new StdoutDisplayData(incLog)); - } - CommonUtils.doSleepQuietly(500l); - } - String incLog = logModel.consumeLog(); - if (StringUtils.isNotEmpty(incLog)) { - displayOper.doOutput(new StdoutDisplayData(incLog)); - } - } - - @Override - public void update(LinkisClientEvent event, Object msg) { - Model model = new LinkisLogModel(); - model.buildModel(msg); - this.present(model, null); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultInfoPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultInfoPresenter.java deleted file mode 100644 index 5a953fa5d78..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultInfoPresenter.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.present; - -import org.apache.linkis.cli.application.present.model.LinkisResultInfoModel; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.display.DisplayOperFactory; -import org.apache.linkis.cli.core.present.display.data.StdoutDisplayData; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; - -public class LinkisResultInfoPresenter implements Presenter { - @Override - public void present(Model model, PresentWay presentWay) { - if (!(model instanceof LinkisResultInfoModel)) { - throw new PresenterException( - "PST0001", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Input model for \"LinkisResultInfoPresenter\" is not instance of \"LinkisResultInfoModel\""); - } - DisplayOperFactory.getDisplayOper(PresentModeImpl.STDOUT) - .doOutput(new 
StdoutDisplayData(formatResultIndicator((LinkisResultInfoModel) model))); - } - - protected String formatResultIndicator(LinkisResultInfoModel model) { - StringBuilder infoBuilder = new StringBuilder(); - String extraMsgStr = ""; - - if (model.getMessage() != null) { - extraMsgStr = model.getMessage().toString(); - } - if (model.getJobStatus().isJobSuccess()) { - - LogUtils.getInformationLogger().info("Job execute successfully! Will try get execute result"); - infoBuilder - .append("============Result:================") - .append(System.lineSeparator()) - .append("TaskId:") - .append(model.getJobID()) - .append(System.lineSeparator()) - .append("ExecId: ") - .append(model.getExecID()) - .append(System.lineSeparator()) - .append("User:") - .append(model.getUser()) - .append(System.lineSeparator()) - .append("Current job status:") - .append(model.getJobStatus()) - .append(System.lineSeparator()) - .append("extraMsg: ") - .append(extraMsgStr) - .append(System.lineSeparator()) - .append("result: ") - .append(extraMsgStr) - .append(System.lineSeparator()); - } else if (model.getJobStatus().isJobFinishedState()) { - LogUtils.getInformationLogger().info("Job failed! 
Will not try get execute result."); - infoBuilder - .append("============Result:================") - .append(System.lineSeparator()) - .append("TaskId:") - .append(model.getJobID()) - .append(System.lineSeparator()) - .append("ExecId: ") - .append(model.getExecID()) - .append(System.lineSeparator()) - .append("User:") - .append(model.getUser()) - .append(System.lineSeparator()) - .append("Current job status:") - .append(model.getJobStatus()) - .append(System.lineSeparator()) - .append("extraMsg: ") - .append(extraMsgStr) - .append(System.lineSeparator()); - if (model.getErrCode() != null) { - infoBuilder.append("errCode: ").append(model.getErrCode()).append(System.lineSeparator()); - } - if (StringUtils.isNotBlank(model.getErrDesc())) { - infoBuilder.append("errDesc: ").append(model.getErrDesc()).append(System.lineSeparator()); - } - } else { - throw new PresenterException( - "PST0011", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Job is not completed but triggered ResultPresenter"); - } - return infoBuilder.toString(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultPresenter.java deleted file mode 100644 index b5e39c4e2e1..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultPresenter.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.present; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultSet; -import org.apache.linkis.cli.application.present.model.LinkisResultModel; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.PresentWayImpl; -import org.apache.linkis.cli.core.present.display.DisplayOperFactory; -import org.apache.linkis.cli.core.present.display.DisplayOperator; -import org.apache.linkis.cli.core.present.display.data.FileDisplayData; -import org.apache.linkis.cli.core.utils.CommonUtils; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; - -import java.text.MessageFormat; -import java.util.*; - -public class LinkisResultPresenter implements Presenter { - - @Override - public void present(Model model, PresentWay presentWay) { - if (!(model instanceof LinkisResultModel)) { - throw new 
PresenterException( - "PST0001", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Input model for \"LinkisResultPresenter\" is not instance of \"LinkisResultModel\""); - } - if (!(presentWay instanceof PresentWayImpl)) { - throw new PresenterException( - "PST0002", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Input PresentWay for \"LinkisResultPresenter\" is not instance of \"PresentWayImpl\""); - } - LinkisResultModel resultModel = (LinkisResultModel) model; - PresentWayImpl presentWay1 = (PresentWayImpl) presentWay; - - if (!resultModel.getJobStatus().isJobSuccess()) { - LogUtils.getInformationLogger() - .info("JobStatus is not \'success\'. Will not retrieve result-set."); - return; - } - String msg = ""; - if (resultModel.hasResult()) { - msg = - "Retrieving result-set, may take time if result-set is large, please do not exit program."; - } else { - msg = "Your job has no result."; - } - LogUtils.getInformationLogger().info(msg); - - final DisplayOperator displayOperator = - DisplayOperFactory.getDisplayOper( - presentWay1.getMode()); // currently we don't allow printing log to file here - - int preIdx = -1; - StringBuilder resultSb = new StringBuilder(); - - while (!resultModel.resultFinReceived()) { - preIdx = presentOneIteration(resultModel, preIdx, presentWay1, resultSb, displayOperator); - CommonUtils.doSleepQuietly(500l); - } - presentOneIteration(resultModel, preIdx, presentWay1, resultSb, displayOperator); - - if (presentWay1.getMode() == PresentModeImpl.TEXT_FILE - || StringUtils.isNotBlank(presentWay1.getPath())) { - LogUtils.getInformationLogger() - .info("ResultSet has been successfully written to path: " + presentWay1.getPath()); - } - } - - protected int presentOneIteration( - LinkisResultModel resultModel, - int preIdx, - PresentWayImpl presentWay, - StringBuilder resultSb, - DisplayOperator displayOperator) { - List linkisResultSets = resultModel.consumeResultContent(); - if (linkisResultSets != null && !linkisResultSets.isEmpty()) { - 
for (LinkisResultSet c : linkisResultSets) { - int idxResultset = c.getResultsetIdx(); - /** - * Notice: we assume result-sets are visited one by one in non-descending order!!! i.e. - * either idxResultset == preIdx or idxResultset - preIdx == 1 i.e. resultsets[0] -> - * resultsets[1] -> ... - */ - if (idxResultset - preIdx != 0 && idxResultset - preIdx != 1) { - throw new PresenterException( - "PST0002", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Linkis resultsets are visited in descending order or are not visited one-by-one"); - } - - boolean flag = idxResultset > preIdx; - if (presentWay.isDisplayMetaAndLogo()) { - if (idxResultset - preIdx == 1) { - resultSb.setLength(0); - resultSb - .append(MessageFormat.format(AppConstants.RESULTSET_LOGO, idxResultset + 1)) - .append(System.lineSeparator()); - if (c.getResultMeta() != null) { - resultSb - .append(AppConstants.RESULTSET_META_BEGIN_LOGO) - .append(System.lineSeparator()); - resultSb.append(formatResultMeta(c.getResultMeta())); - resultSb.append(AppConstants.RESULTSET_META_END_LOGO).append(System.lineSeparator()); - } - } - } - preIdx = idxResultset; - String contentStr = formatResultContent(c.getResultMeta(), c.getContent()); - if (contentStr != null) { - resultSb.append(contentStr); - } - if (resultSb.length() != 0) { - String resultFileName = - resultModel.getUser() - + "-task-" - + resultModel.getJobID() - + "-result-" - + String.valueOf(idxResultset + 1) - + ".txt"; - displayOperator.doOutput( - new FileDisplayData(presentWay.getPath(), resultFileName, resultSb.toString(), flag)); - resultSb.setLength(0); - } - } - } - return preIdx; - } - - protected String formatResultMeta(List> metaData) { - - StringBuilder outputBuilder = new StringBuilder(); - - if (metaData == null || metaData.size() == 0) { - return null; - } - - List titles = new ArrayList<>(); - - // gather keys as title - for (LinkedHashMap mapElement : metaData) { - if (mapElement == null || mapElement.size() == 0) { - continue; - } - - 
Set> entrySet = mapElement.entrySet(); - if (entrySet == null) { - break; - } - for (Map.Entry entry : entrySet) { - String key = entry.getKey(); - if (key != null && !titles.contains(key)) { - titles.add(key); - outputBuilder.append(key).append("\t"); - } - } - } - - outputBuilder.append(System.lineSeparator()); - - // gather value and print to output - for (LinkedHashMap mapElement : metaData) { - if (mapElement == null || mapElement.size() == 0) { - continue; - } - String candidate; - for (String title : titles) { - if (mapElement.containsKey(title)) { - candidate = mapElement.get(title); - } else { - candidate = "NULL"; - } - outputBuilder.append(candidate).append("\t"); - } - outputBuilder.append(System.lineSeparator()); - } - return outputBuilder.toString(); - } - - protected String formatResultContent( - List> metaData, List> contentData) { - - StringBuilder outputBuilder = new StringBuilder(); - if (contentData == null || contentData.size() == 0) { // finished - return null; - } - - int listLen = contentData.size(); - for (int i = 0; i < listLen; i++) { - List listElement = contentData.get(i); - if (listElement == null || listElement.size() == 0) { - continue; - } - for (String element : listElement) { - outputBuilder.append(element).append("\t"); - } - if (i < listLen - 1) { - outputBuilder.append(System.lineSeparator()); - } - } - - return outputBuilder.toString(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java deleted file mode 100644 index acfc392e485..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java +++ /dev/null @@ -1,97 
+0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.present.model; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobDataImpl; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -import java.util.Date; - -public class LinkisJobInfoModel implements Model { - - private String cid; - private String jobId; - private String message; - private String exception; - private String cause; - - private String taskID; - private String instance; - private String simpleExecId; - private String execId; - private String umUser; - private String executionCode; - private String logPath; - private JobStatus status; - private String engineType; - private String runType; - private Long costTime; - private Date createdTime; - private Date updatedTime; - private Date engineStartTime; - private Integer errCode; - private String errMsg; - private 
String executeApplicationName; - private String requestApplicationName; - private Float progress; - - @Override - public void buildModel(Object data) { - if (!(data instanceof LinkisJobDataImpl)) { - throw new TransformerException( - "TFM0010", - ErrorLevel.ERROR, - CommonErrMsg.TransformerException, - "Failed to init LinkisJobInfoModel: " - + data.getClass().getCanonicalName() - + "is not instance of \"LinkisJobDataImpl\""); - } - this.jobId = ((LinkisJobDataImpl) data).getJobID(); - this.message = ((LinkisJobDataImpl) data).getMessage(); - this.taskID = ((LinkisJobDataImpl) data).getJobID(); - this.instance = ((LinkisJobDataImpl) data).getInstance(); - this.simpleExecId = ((LinkisJobDataImpl) data).getSimpleExecId(); - this.execId = ((LinkisJobDataImpl) data).getExecID(); - this.umUser = ((LinkisJobDataImpl) data).getUmUser(); - this.executionCode = ((LinkisJobDataImpl) data).getExecutionCode(); - this.logPath = ((LinkisJobDataImpl) data).getLogPath(); - this.status = ((LinkisJobDataImpl) data).getJobStatus(); - this.engineType = ((LinkisJobDataImpl) data).getEngineType(); - this.runType = ((LinkisJobDataImpl) data).getRunType(); - this.costTime = ((LinkisJobDataImpl) data).getCostTime(); - this.createdTime = ((LinkisJobDataImpl) data).getCreatedTime(); - this.updatedTime = ((LinkisJobDataImpl) data).getUpdatedTime(); - this.engineStartTime = ((LinkisJobDataImpl) data).getEngineStartTime(); - this.errCode = ((LinkisJobDataImpl) data).getErrCode(); - this.errMsg = ((LinkisJobDataImpl) data).getErrDesc(); - this.executeApplicationName = ((LinkisJobDataImpl) data).getExecuteApplicationName(); - this.requestApplicationName = ((LinkisJobDataImpl) data).getRequestApplicationName(); - this.progress = ((LinkisJobDataImpl) data).getJobProgress(); - Exception e = ((LinkisJobDataImpl) data).getException(); - if (e != null) { - this.exception = ExceptionUtils.getMessage(e); - this.cause = ExceptionUtils.getRootCauseMessage(e); - } - } -} diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobKillModel.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobKillModel.java deleted file mode 100644 index 95eb7d03c25..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobKillModel.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.present.model; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobDataImpl; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -public class LinkisJobKillModel implements Model { - - private String cid; - private String jobId; - private String message; - private String exception; - private String cause; - - private String execID; - private String user; - private JobStatus jobStatus; - - @Override - public void buildModel(Object data) { - if (!(data instanceof LinkisJobDataImpl)) { - throw new TransformerException( - "TFM0010", - ErrorLevel.ERROR, - CommonErrMsg.TransformerException, - "Failed to init LinkisJobKillModel: " - + data.getClass().getCanonicalName() - + "is not instance of \"LinkisJobDataImpl\""); - } - this.jobId = ((LinkisJobDataImpl) data).getJobID(); - this.message = ((LinkisJobDataImpl) data).getMessage(); - this.execID = ((LinkisJobDataImpl) data).getExecID(); - this.user = ((LinkisJobDataImpl) data).getUser(); - this.jobStatus = ((LinkisJobDataImpl) data).getJobStatus(); - Exception e = ((LinkisJobDataImpl) data).getException(); - if (e != null) { - this.exception = ExceptionUtils.getMessage(e); - this.cause = ExceptionUtils.getRootCauseMessage(e); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java deleted file mode 100644 index 503c778339c..00000000000 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.present.model; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisLogData; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -public class LinkisLogModel implements Model { - private LinkisLogData data; - - @Override - public void buildModel(Object data) { - if (!(data instanceof LinkisLogData)) { - throw new TransformerException( - "TFM0010", - ErrorLevel.ERROR, - CommonErrMsg.TransformerException, - "Failed to init LinkisLogModel: " - + data.getClass().getCanonicalName() - + "is not instance of \"LinkisLogData\""); - } - this.data = (LinkisLogData) data; - } - - public String consumeLog() { - return data.consumeLog(); - } - - public boolean logFinReceived() { - return data.logFinReceived(); - } -} diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultInfoModel.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultInfoModel.java deleted file mode 100644 index bfa0ae6cc88..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultInfoModel.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.present.model; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultData; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -public class LinkisResultInfoModel implements Model { - private String jobID; - private String execID; - private String user; - private JobStatus jobStatus; - private String message; - private Integer errCode; - private String errDesc; - - @Override - public void buildModel(Object data) { - if (!(data instanceof LinkisResultData)) { - throw new TransformerException( - "TFM0010", - ErrorLevel.ERROR, - CommonErrMsg.TransformerException, - "Failed to init LinkisResultInfoModel: " - + data.getClass().getCanonicalName() - + "is not instance of \"LinkisResultData\""); - } - LinkisResultData jobData = (LinkisResultData) data; - jobID = jobData.getJobID(); - execID = jobData.getExecID(); - user = jobData.getUser(); - jobStatus = jobData.getJobStatus(); - message = jobData.getMessage(); - errCode = jobData.getErrCode(); - errDesc = jobData.getErrDesc(); - } - - public String getJobID() { - return jobID; - } - - public String getExecID() { - return execID; - } - - public String getUser() { - return user; - } - - public JobStatus getJobStatus() { - return jobStatus; - } - - public String getMessage() { - return message; - } - - public Integer getErrCode() { - return errCode; - } - - public String getErrDesc() { - return errDesc; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java deleted file mode 100644 index 1dce057c3e6..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.present.model; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultSet; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import java.util.List; - -public class LinkisResultModel implements Model { - private LinkisResultData data; - - @Override - public void buildModel(Object data) { - if (!(data instanceof LinkisResultData)) { - throw new TransformerException( - "TFM0010", - ErrorLevel.ERROR, - CommonErrMsg.TransformerException, - "Failed to init LinkisResultModel: " - + data.getClass().getCanonicalName() - + "is not instance of \"LinkisResultData\""); - } - this.data = (LinkisResultData) data; - } - - public List consumeResultContent() { - return data.consumeResultContent(); - } - - public boolean resultFinReceived() { - return data.resultFinReceived(); - } - - public JobStatus getJobStatus() { - return data.getJobStatus(); - } - - public String getJobID() { - return data.getJobID(); - } - - public String getUser() { - return data.getUser(); - } - - public boolean hasResult() { - return data.hasResult(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisSubmitResultModel.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisSubmitResultModel.java deleted file mode 100644 index 6b622e14537..00000000000 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisSubmitResultModel.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.present.model; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobDataImpl; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -public class LinkisSubmitResultModel implements Model { - private String jobId; - private JobStatus status; - private String message; - private String exception; - private String cause; - - @Override - public void buildModel(Object data) { - if (!(data instanceof LinkisJobDataImpl)) { - throw new TransformerException( - "TFM0010", - ErrorLevel.ERROR, - CommonErrMsg.TransformerException, - "Failed to init LinkisJobInfoModel: " - + data.getClass().getCanonicalName() - + "is not instance of \"LinkisJobDataImpl\""); - } - this.jobId = ((LinkisJobDataImpl) data).getJobID(); - this.status = ((LinkisJobDataImpl) data).getJobStatus(); - this.message = ((LinkisJobDataImpl) data).getMessage(); - Exception e = ((LinkisJobDataImpl) data).getException(); - if (e != null) { - this.exception = ExceptionUtils.getMessage(e); - this.cause = ExceptionUtils.getRootCauseMessage(e); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuite.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuite.java deleted file mode 100644 index 8b05a7cf78c..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuite.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.suite; - -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ResultHandler; - -import java.util.Map; - -public class ExecutionSuite { - Execution execution; - Map jobs; - ResultHandler[] resultHandlers; - - public ExecutionSuite( - Execution execution, Map jobs, ResultHandler... 
resultHandlers) { - this.execution = execution; - this.jobs = jobs; - this.resultHandlers = resultHandlers; - } - - public Execution getExecution() { - return execution; - } - - public void setExecution(Execution execution) { - this.execution = execution; - } - - public Map getJobs() { - return jobs; - } - - public void setJobs(Map jobs) { - this.jobs = jobs; - } - - public ResultHandler[] getResultHandlers() { - return resultHandlers; - } - - public void setResultHandlers(ResultHandler[] resultHandlers) { - this.resultHandlers = resultHandlers; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuiteFactory.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuiteFactory.java deleted file mode 100644 index c55ee7b9e49..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuiteFactory.java +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.suite; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.interactor.command.LinkisCmdType; -import org.apache.linkis.cli.application.interactor.job.builder.LinkisManageJobBuilder; -import org.apache.linkis.cli.application.interactor.job.builder.LinkisOnceJobBuilder; -import org.apache.linkis.cli.application.interactor.job.builder.LinkisSubmitJobBuilder; -import org.apache.linkis.cli.application.interactor.job.subtype.LinkisManSubType; -import org.apache.linkis.cli.application.interactor.job.subtype.LinkisSubmitSubType; -import org.apache.linkis.cli.application.interactor.validate.LinkisManageValidator; -import org.apache.linkis.cli.application.interactor.validate.LinkisOnceSubmitValidator; -import org.apache.linkis.cli.application.interactor.validate.LinkisSubmitValidator; -import org.apache.linkis.cli.application.present.LinkisLogPresenter; -import org.apache.linkis.cli.application.present.LinkisResultInfoPresenter; -import org.apache.linkis.cli.application.present.LinkisResultPresenter; -import org.apache.linkis.cli.application.present.model.LinkisJobInfoModel; -import org.apache.linkis.cli.application.present.model.LinkisJobKillModel; -import org.apache.linkis.cli.application.present.model.LinkisResultInfoModel; -import org.apache.linkis.cli.application.present.model.LinkisResultModel; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobData; -import org.apache.linkis.cli.common.entity.job.JobDescription; -import org.apache.linkis.cli.common.entity.job.JobSubType; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import 
org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.result.ResultHandler; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.execution.AsyncSubmission; -import org.apache.linkis.cli.core.interactor.execution.Help; -import org.apache.linkis.cli.core.interactor.execution.JobManagement; -import org.apache.linkis.cli.core.interactor.execution.SyncSubmission; -import org.apache.linkis.cli.core.interactor.job.JobBuilder; -import org.apache.linkis.cli.core.interactor.result.DefaultResultHandler; -import org.apache.linkis.cli.core.interactor.result.PresentResultHandler; -import org.apache.linkis.cli.core.present.DefaultStdOutPresenter; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ExecutionSuiteFactory { - private static Logger logger = LoggerFactory.getLogger(ExecutionSuiteFactory.class); - - public static ExecutionSuite getSuite( - CmdType cmdType, VarAccess stdVarAccess, VarAccess sysVarAccess) { - - ExecutionSuite suite; - Execution execution; - Map jobs = new HashMap<>(); - JobSubType subType; - JobBuilder jobBuilder; - Validator validator; - ResultHandler defaultHandler = new DefaultResultHandler(); - - /* - Prepare Builders and command-specific components - */ - if (cmdType == LinkisCmdType.UNIVERSAL) { - if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_KILL_OPT)) { - execution = new JobManagement(); - jobBuilder = new LinkisManageJobBuilder(); - PresentResultHandler handler = new PresentResultHandler(); - handler.setPresenter(new DefaultStdOutPresenter()); - 
handler.setModel(new LinkisJobKillModel()); - validator = new LinkisManageValidator(); - subType = LinkisManSubType.KILL; - suite = new ExecutionSuite(execution, jobs, handler, defaultHandler); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_STATUS_OPT)) { - execution = new JobManagement(); - jobBuilder = new LinkisManageJobBuilder(); - PresentResultHandler handler = new PresentResultHandler(); - handler.setPresenter(new DefaultStdOutPresenter()); - handler.setModel(new LinkisJobInfoModel()); - validator = new LinkisManageValidator(); - subType = LinkisManSubType.STATUS; - suite = new ExecutionSuite(execution, jobs, handler, defaultHandler); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_LOG_OPT)) { - execution = new JobManagement(); - jobBuilder = new LinkisManageJobBuilder().setLogListener(new LinkisLogPresenter()); - validator = new LinkisManageValidator(); - subType = LinkisManSubType.LOG; - suite = new ExecutionSuite(execution, jobs, null, defaultHandler); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_RESULT_OPT)) { - execution = new JobManagement(); - jobBuilder = new LinkisManageJobBuilder(); - PresentResultHandler handler = new PresentResultHandler(); - handler.setPresenter(new LinkisResultPresenter()); - handler.setModel(new LinkisResultModel()); - validator = new LinkisManageValidator(); - subType = LinkisManSubType.RESULT; - suite = new ExecutionSuite(execution, jobs, handler, defaultHandler); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_HELP_OPT)) { - execution = new Help(); - jobs.put( - "help", - new Job() { - @Override - public String getCid() { - return null; - } - - @Override - public CmdType getCmdType() { - return cmdType; - } - - @Override - public JobSubType getSubType() { - return null; - } - - @Override - public JobDescription getJobDesc() { - return null; - } - - @Override - public JobData getJobData() { - return null; - } - - @Override - public JobOperator getJobOperator() { - return null; - } - - @Override 
- public PresentWay getPresentWay() { - return null; - } - }); - return new ExecutionSuite(execution, jobs, null, defaultHandler); - } else { - Boolean asyncSubmission = - stdVarAccess.getVarOrDefault(Boolean.class, AppKeys.LINKIS_CLIENT_ASYNC_OPT, false); - if (asyncSubmission) { - execution = new AsyncSubmission(); - PresentResultHandler handler = new PresentResultHandler(); - handler.setPresenter(new DefaultStdOutPresenter()); - handler.setModel(new LinkisJobInfoModel()); - jobBuilder = new LinkisSubmitJobBuilder().setAsync(true); - subType = LinkisSubmitSubType.SUBMIT; - suite = new ExecutionSuite(execution, jobs, handler, defaultHandler); - validator = new LinkisSubmitValidator(); - } else { - execution = new SyncSubmission(); - subType = LinkisSubmitSubType.SUBMIT; - PresentResultHandler handler1 = new PresentResultHandler(); - handler1.setPresenter(new LinkisResultInfoPresenter()); - handler1.setModel(new LinkisResultInfoModel()); - PresentResultHandler handler2 = new PresentResultHandler(); - handler2.setPresenter(new LinkisResultPresenter()); - handler2.setModel(new LinkisResultModel()); - - String mode = - stdVarAccess.getVarOrDefault( - String.class, AppKeys.LINKIS_CLIENT_MODE_OPT, AppConstants.UJES_MODE); - if (StringUtils.equalsIgnoreCase(mode, AppConstants.ONCE_MODE)) { - jobBuilder = new LinkisOnceJobBuilder().setLogListener(new LinkisLogPresenter()); - ; - validator = new LinkisOnceSubmitValidator(); - } else { - jobBuilder = new LinkisSubmitJobBuilder().setLogListener(new LinkisLogPresenter()); - validator = new LinkisSubmitValidator(); - } - suite = new ExecutionSuite(execution, jobs, handler1, handler2, defaultHandler); - } - } - /* - build job - */ - Job job = - jobBuilder - .setCid(AppConstants.DUMMY_CID) // currently we don't need this - .setCmdType(cmdType) - .setJobSubType(subType) - .setStdVarAccess(stdVarAccess) - .setSysVarAccess(sysVarAccess) - .build(); - logger.info("==========JOB============\n" + Utils.GSON.toJson(job.getJobDesc())); 
- if (validator != null) { - validator.doValidation(job); - } - - jobs.put(job.getCid(), job); - - return suite; - } else { - throw new LinkisClientExecutionException( - "EXE0029", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Command Type is not supported"); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/ExecutionUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/ExecutionUtils.java deleted file mode 100644 index 82a3d8bff2c..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/ExecutionUtils.java +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.utils; - -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.constants.LinkisConstants; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.BuilderException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; - -import java.io.*; -import java.util.Set; - -public class ExecutionUtils { - public static String getSubmitUser(VarAccess stdVarAccess, String osUser, Set adminSet) { - - String enableSpecifyUserStr = - stdVarAccess.getVar( - String.class, AppKeys.LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_USER_SPECIFICATION); - Boolean enableSpecifyUser = - Boolean.parseBoolean(enableSpecifyUserStr) || adminSet.contains(osUser); - String authenticationStrategy = - stdVarAccess.getVarOrDefault( - String.class, - AppKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, - LinkisConstants.AUTH_STRATEGY_STATIC); - - String submitUsr; - if (StringUtils.equalsIgnoreCase(authenticationStrategy, LinkisConstants.AUTH_STRATEGY_TOKEN)) { - /* - default -> use current os user - enableSpecifyUser -> -submitUser - enableSpecifyProxyUser -> -proxyUser - ADMIN_USERS can do anything - */ - if (enableSpecifyUser) { - if (stdVarAccess.hasVar(AppKeys.JOB_COMMON_SUBMIT_USER)) { - submitUsr = stdVarAccess.getVar(String.class, AppKeys.JOB_COMMON_SUBMIT_USER); - if (!adminSet.contains(osUser) && adminSet.contains(submitUsr)) { - throw new BuilderException( - "BLD0010", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Cannot specify admin-user as submit-user"); - } - } else { - submitUsr = osUser; - LogUtils.getInformationLogger() - .info( - "user does not specify submit-user, will use current Linux user \"" - + osUser - + "\" by default."); - } - } else if 
(stdVarAccess.hasVar(AppKeys.JOB_COMMON_SUBMIT_USER)) { - submitUsr = stdVarAccess.getVar(String.class, AppKeys.JOB_COMMON_SUBMIT_USER); - if (!StringUtils.equals(submitUsr, osUser)) { - throw new BuilderException( - "BLD0010", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Cannot specify submit-user when user-specification switch is off"); - } - } else { - submitUsr = osUser; - LogUtils.getInformationLogger() - .info( - "user does not specify submit-user, will use current Linux user \"" - + osUser - + "\" by default."); - } - } else if (StringUtils.equalsIgnoreCase( - authenticationStrategy, LinkisConstants.AUTH_STRATEGY_STATIC)) { - String authKey = stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_TOKEN_KEY); - String submitUsrInput = - stdVarAccess.getVarOrDefault(String.class, AppKeys.JOB_COMMON_SUBMIT_USER, authKey); - if (StringUtils.equalsIgnoreCase(submitUsrInput, authKey)) { - submitUsr = authKey; - } else { - throw new BuilderException( - "BLD0011", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Submit-User should be the same as Auth-Key under Static-Authentication-Strategy \'"); - } - } else { - throw new BuilderException( - "BLD0011", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Authentication strategy \'" + authenticationStrategy + "\' is not supported"); - } - - return submitUsr; - } - - public static String getProxyUser( - VarAccess stdVarAccess, String submitUsr, Set adminSet) { - - String enableSpecifyPRoxyUserStr = - stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_PROXY_USER); - Boolean enableSpecifyProxyUser = - Boolean.parseBoolean(enableSpecifyPRoxyUserStr) || adminSet.contains(submitUsr); - - /* - default -> use current -submitUser user - enableSpecifyUser -> -submitUser - enableSpecifyProxyUser -> -proxyUser - ADMIN_USERS can do anything - */ - String proxyUsr; - - if (enableSpecifyProxyUser) { - if (stdVarAccess.hasVar(AppKeys.JOB_COMMON_PROXY_USER)) { - proxyUsr = 
stdVarAccess.getVar(String.class, AppKeys.JOB_COMMON_PROXY_USER); - if (!adminSet.contains(submitUsr) && adminSet.contains(proxyUsr)) { - throw new BuilderException( - "BLD0010", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Cannot specify admin-user as proxy-user"); - } - } else { - proxyUsr = submitUsr; - LogUtils.getInformationLogger() - .info( - "user does not specify proxy-user, will use current submit-user \"" - + submitUsr - + "\" by default."); - } - } else if (stdVarAccess.hasVar(AppKeys.JOB_COMMON_PROXY_USER)) { - proxyUsr = stdVarAccess.getVar(String.class, AppKeys.JOB_COMMON_PROXY_USER); - if (!StringUtils.equals(proxyUsr, submitUsr)) { - throw new BuilderException( - "BLD0010", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Cannot specify proxy-user when proxy-user-specification switch is off"); - } - } else { - proxyUsr = submitUsr; - LogUtils.getInformationLogger() - .info( - "user does not specify proxy-user, will use current submit-user \"" - + proxyUsr - + "\" by default."); - } - return proxyUsr; - } - - public static String readFile(String path) { - try { - File inputFile = new File(path); - - InputStream inputStream = new FileInputStream(inputFile); - InputStreamReader iReader = new InputStreamReader(inputStream); - BufferedReader bufReader = new BufferedReader(iReader); - - StringBuilder sb = new StringBuilder(); - StringBuilder line; - while (bufReader.ready()) { - line = new StringBuilder(bufReader.readLine()); - sb.append(line).append(System.lineSeparator()); - } - - return sb.toString(); - - } catch (FileNotFoundException fe) { - throw new BuilderException( - "BLD0005", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "User specified script file does not exist: " + path, - fe); - } catch (Exception e) { - throw new BuilderException( - "BLD0006", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Cannot read user specified script file: " + path, - e); - } - } -} diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/Utils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/Utils.java deleted file mode 100644 index e70c32a37b2..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/Utils.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.utils; - -import org.apache.commons.lang3.StringUtils; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class Utils { - - public static final Gson GSON = - new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create(); - private static final Logger logger = LoggerFactory.getLogger(Utils.class); - - public static boolean isValidExecId(String execId) { - boolean ret = false; - if (StringUtils.isNotBlank(execId)) { - ret = true; - } - return ret; - } - - public static String progressInPercentage(float progress) { - return String.valueOf(progress * 100) + "%"; - } - - public static void doSleepQuietly(Long sleepMills) { - try { - Thread.sleep(sleepMills); - } catch (Exception ignore) { - // ignored - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppConstantsTest.java deleted file mode 100644 index bb16d43e47e..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppConstantsTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.constants; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class AppConstantsTest { - - @Test - @DisplayName("constTest") - public void constTest() { - - String defaultConfigName = AppConstants.DEFAULT_CONFIG_NAME; - Long jobQuerySleepMills = AppConstants.JOB_QUERY_SLEEP_MILLS; - String resultsetLogo = AppConstants.RESULTSET_LOGO; - String resultsetMetaBeginLogo = AppConstants.RESULTSET_META_BEGIN_LOGO; - String resultsetMetaEndLogo = AppConstants.RESULTSET_META_END_LOGO; - String resultsetSeparatorLogo = AppConstants.RESULTSET_SEPARATOR_LOGO; - Integer resultsetPageSize = AppConstants.RESULTSET_PAGE_SIZE; - String jobCreatorDefault = AppConstants.JOB_CREATOR_DEFAULT; - String jobCreatorAsyncDefault = AppConstants.JOB_CREATOR_ASYNC_DEFAULT; - String dummyCid = AppConstants.DUMMY_CID; - String linkisCli = AppConstants.LINKIS_CLI; - String ujesMode = AppConstants.UJES_MODE; - String onceMode = AppConstants.ONCE_MODE; - - Assertions.assertEquals("linkis-cli.properties", defaultConfigName); - Assertions.assertTrue(2000L == jobQuerySleepMills.longValue()); - Assertions.assertEquals("============ RESULT SET {0} ============", resultsetLogo); - Assertions.assertEquals("----------- META DATA ------------", resultsetMetaBeginLogo); - Assertions.assertEquals("------------ END OF META DATA ------------", resultsetMetaEndLogo); - Assertions.assertEquals("------------------------", resultsetSeparatorLogo); - 
Assertions.assertTrue(5000 == resultsetPageSize.intValue()); - Assertions.assertEquals("LINKISCLI", jobCreatorDefault); - Assertions.assertEquals("LINKISCLIASYNC", jobCreatorAsyncDefault); - Assertions.assertEquals("dummy", dummyCid); - Assertions.assertEquals("LinkisCli", linkisCli); - Assertions.assertEquals("ujes", ujesMode); - Assertions.assertEquals("once", onceMode); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppKeysTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppKeysTest.java deleted file mode 100644 index f2477149b87..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppKeysTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.constants; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class AppKeysTest { - - @Test - @DisplayName("constTest") - public void constTest() { - - String adminUsers = AppKeys.ADMIN_USERS; - String linkisClientNoncustomizable = AppKeys.LINKIS_CLIENT_NONCUSTOMIZABLE; - String logPathKey = AppKeys.LOG_PATH_KEY; - String logFileKey = AppKeys.LOG_FILE_KEY; - String clientConfigRootKey = AppKeys.CLIENT_CONFIG_ROOT_KEY; - String defaultConfigFileNameKey = AppKeys.DEFAULT_CONFIG_FILE_NAME_KEY; - String linuxUserKey = AppKeys.LINUX_USER_KEY; - String jobExec = AppKeys.JOB_EXEC; - String jobExecCode = AppKeys.JOB_EXEC_CODE; - String jobContent = AppKeys.JOB_CONTENT; - String jobSource = AppKeys.JOB_SOURCE; - String jobParamConf = AppKeys.JOB_PARAM_CONF; - String jobParamRuntime = AppKeys.JOB_PARAM_RUNTIME; - String jobParamVar = AppKeys.JOB_PARAM_VAR; - String jobLabel = AppKeys.JOB_LABEL; - - Assertions.assertEquals("hadoop,root,shangda", adminUsers); - Assertions.assertEquals("wds.linkis.client.noncustomizable", linkisClientNoncustomizable); - Assertions.assertEquals("log.path", logPathKey); - Assertions.assertEquals("log.file", logFileKey); - Assertions.assertEquals("conf.root", clientConfigRootKey); - Assertions.assertEquals("conf.file", defaultConfigFileNameKey); - Assertions.assertEquals("user.name", linuxUserKey); - - Assertions.assertEquals("wds.linkis.client.exec", jobExec); - Assertions.assertEquals("wds.linkis.client.exec.code", jobExecCode); - Assertions.assertEquals("wds.linkis.client.jobContent", jobContent); - Assertions.assertEquals("wds.linkis.client.source", jobSource); - - Assertions.assertEquals("wds.linkis.client.param.conf", jobParamConf); - Assertions.assertEquals("wds.linkis.client.param.runtime", jobParamRuntime); - Assertions.assertEquals("wds.linkis.client.param.var", jobParamVar); - - 
Assertions.assertEquals("wds.linkis.client.label", jobLabel); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdTypeTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdTypeTest.java deleted file mode 100644 index 16f47274d48..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdTypeTest.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.command; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class LinkisCmdTypeTest { - - @Test - @DisplayName("enumTest") - public void enumTest() { - - int id = LinkisCmdType.UNIVERSAL.getId(); - String name = LinkisCmdType.UNIVERSAL.getName(); - String desc = LinkisCmdType.UNIVERSAL.getDesc(); - - Assertions.assertTrue(1 == id); - Assertions.assertEquals("linkis-cli", name); - Assertions.assertNotNull(desc); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/ProcessInputUtil.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/ProcessInputUtil.java deleted file mode 100644 index 33af32bb4b1..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/ProcessInputUtil.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.command.template; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.data.ProcessedData; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.interactor.command.fitter.SingleTplFitter; -import org.apache.linkis.cli.core.interactor.command.parser.Parser; -import org.apache.linkis.cli.core.interactor.command.parser.SingleCmdParser; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; -import org.apache.linkis.cli.core.interactor.properties.PropertiesLoader; -import org.apache.linkis.cli.core.interactor.properties.PropsFilesScanner; -import org.apache.linkis.cli.core.interactor.properties.StdPropsLoader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropsFileReader; -import org.apache.linkis.cli.core.interactor.properties.reader.SysEnvReader; -import org.apache.linkis.cli.core.interactor.properties.reader.SysPropsReader; -import org.apache.linkis.cli.core.interactor.validate.ParsedTplValidator; -import org.apache.linkis.cli.core.interactor.var.StdVarAccess; -import org.apache.linkis.cli.core.interactor.var.SysVarAccess; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import 
org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ProcessInputUtil { - private static Logger logger = LoggerFactory.getLogger(ProcessInputUtil.class); - - public static ProcessedData generateProcessedData(String[] cmdStr, CmdTemplate template) { - /* - user input - */ - Parser parser = - new SingleCmdParser() - .setMapper(null) - .setTemplate(template) - .setFitter(new SingleTplFitter()); - - ParseResult result = parser.parse(cmdStr); - - ParsedTplValidator parsedTplValidator = new ParsedTplValidator(); - parsedTplValidator.doValidation(result.getParsedTemplate()); - - Params params = result.getParams(); - logger.debug("==========params============\n" + Utils.GSON.toJson(params)); - - Map propertiesMap = new HashMap<>(); - /* - default config, -Dconf.root & -Dconf.file specifies config path - */ - System.setProperty("conf.root", "src/test/resources/conf/"); - System.setProperty("conf.file", "linkis-cli.properties"); - String configPath = System.getProperty("conf.root"); - String defaultConfFileName = System.getProperty("conf.file"); - List readersList = - new PropsFilesScanner().getPropsReaders(configPath); // +1 user config - /* - user defined config - */ - String userConfPath = null; - if (params.containsParam(AppKeys.LINKIS_CLIENT_USER_CONFIG)) { - userConfPath = - (String) params.getParamItemMap().get(AppKeys.LINKIS_CLIENT_USER_CONFIG).getValue(); - } - if (StringUtils.isNotBlank(userConfPath)) { - PropertiesReader reader = - new PropsFileReader() - .setPropsId(AppKeys.LINKIS_CLIENT_USER_CONFIG) - .setPropsPath(userConfPath); - readersList.add(reader); - } else { - LogUtils.getInformationLogger() - .info("User does not provide usr-configuration file. 
Will use default config"); - } - readersList.add(new SysPropsReader()); - readersList.add(new SysEnvReader()); - PropertiesLoader loader = - new StdPropsLoader() - .addPropertiesReaders(readersList.toArray(new PropertiesReader[readersList.size()])); - ClientProperties[] loaderResult = loader.loadProperties(); - for (ClientProperties properties : loaderResult) { - propertiesMap.put(properties.getPropsId(), properties); - } - - VarAccess stdVarAccess = - new StdVarAccess() - .setCmdParams(params) - .setUserConf(propertiesMap.get(AppKeys.LINKIS_CLIENT_USER_CONFIG)) - .setDefaultConf(propertiesMap.get(AppConstants.DEFAULT_CONFIG_NAME)) - .init(); - - VarAccess sysVarAccess = - new SysVarAccess() - .setSysProp(propertiesMap.get(CommonConstants.SYSTEM_PROPERTIES_IDENTIFIER)) - .setSysEnv(propertiesMap.get(CommonConstants.SYSTEM_ENV_IDENTIFIER)); - - return new ProcessedData(null, params.getCmdType(), stdVarAccess, sysVarAccess); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java deleted file mode 100644 index 54a5952f09a..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.command.template; - -import org.apache.linkis.cli.application.constants.TestConstants; -import org.apache.linkis.cli.common.entity.command.CmdType; - -public enum TestCmdType implements CmdType { - TEST_PRIMARY(TestConstants.PRIMARY_COMMAND, 1, TestConstants.SPARK_DESC), - SPARK(TestConstants.SPARK, 1, TestConstants.SPARK_DESC); - // TEST(TestConstants.TEST_COMMAND, 0, TestConstants.TEST_DESC); - - private int id; - private String name; - private String desc; - - TestCmdType(String name, int id) { - this.id = id; - this.name = name; - this.desc = null; - } - - TestCmdType(String name, int id, String desc) { - this.id = id; - this.name = name; - this.desc = desc; - } - - @Override - public int getId() { - return this.id; - } - - @Override - public String getName() { - return this.name; - } - - @Override - public String getDesc() { - return this.desc; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java deleted file mode 100644 index 1a8959f0979..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * 
Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.command.template; - -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.constants.TestConstants; -import org.apache.linkis.cli.core.interactor.command.parser.transformer.ParamKeyMapper; - -public class TestParamMapper extends ParamKeyMapper { - @Override - public void initMapperRules() { - super.updateMapping(TestConstants.PARAM_COMMON_CMD, AppKeys.JOB_EXEC_CODE); - super.updateMapping(TestConstants.PARAM_PROXY, AppKeys.LINKIS_COMMON_GATEWAY_URL); - super.updateMapping(TestConstants.PARAM_USER, AppKeys.LINKIS_COMMON_TOKEN_KEY); - super.updateMapping(TestConstants.PARAM_USR_CONF, AppKeys.LINKIS_CLIENT_USER_CONFIG); - super.updateMapping(TestConstants.PARAM_PASSWORD, AppKeys.LINKIS_COMMON_TOKEN_VALUE); - super.updateMapping(TestConstants.PARAM_PROXY_USER, AppKeys.JOB_COMMON_PROXY_USER); - - updateMapping( - TestConstants.PARAM_SPARK_EXECUTOR_CORES, TestConstants.LINKIS_SPARK_EXECUTOR_CORES); - updateMapping( - TestConstants.PARAM_SPARK_EXECUTOR_MEMORY, TestConstants.LINKIS_SPARK_EXECUTOR_MEMORY); - updateMapping( - TestConstants.PARAM_SPARK_NUM_EXECUTORS, 
TestConstants.LINKIS_SPARK_NUM_EXECUTORS); - updateMapping("spark.executor.instances", TestConstants.LINKIS_SPARK_NUM_EXECUTORS); - // updateMapping(SparkCommandConstants.PARAM_SPARK_NAME, - // SparkCommandConstants.LINKIS_SPARK_NAME); - updateMapping( - TestConstants.PARAM_SPARK_SHUFFLE_PARTITIONS, - TestConstants.LINKIS_SPARK_SHUFFLE_PARTITIONS); - updateMapping(TestConstants.PARAM_SPARK_RUNTYPE, AppKeys.JOB_LABEL_CODE_TYPE); - updateMapping(TestConstants.PARAM_YARN_QUEUE, TestConstants.YARN_QUEUE); - } - - // super.updateMapping("key1", "spark.cmd"); //should throw exception - // super.updateMapping("TestConstants.PARAM_SPARK_CMD", "spark.cmd"); - // super.updateMapping("TestConstants.PARAM_SPARK_CMD", "spark.cmd"); - -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java deleted file mode 100644 index c9274fbf71b..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.command.template; - -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.constants.TestConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.interactor.command.template.AbstractCmdTemplate; -import org.apache.linkis.cli.core.interactor.command.template.option.StdOption; - -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** @description: CommandTemplate for Spark Jobs */ -public class TestSparkCmdTemplate extends AbstractCmdTemplate { - protected StdOption databaseOp = - option( - null, - TestConstants.PARAM_DB, - new String[] {"-d", "--database"}, - "specify database", - true, - ""); - protected StdOption proxyOp = - option( - null, - TestConstants.PARAM_PROXY, - new String[] {"-x", "--proxy"}, - "specify proxy url", - true, - ""); - protected StdOption userOp = - option( - null, - AppKeys.JOB_COMMON_SUBMIT_USER, - new String[] {"-u", "--user"}, - "specify user", - true, - ""); - protected StdOption confOp = - option( - null, - TestConstants.PARAM_USR_CONF, - new String[] {"-c", "--conf"}, - "specify configuration from property file", - true, - ""); - private Logger logger = LoggerFactory.getLogger(TestSparkCmdTemplate.class); - private StdOption passwordOp = - option( - null, - AppKeys.JOB_COMMON_SUBMIT_PASSWORD, - new String[] {"-pwd", "--passwd"}, - "specify user password", - true, - ""); - private StdOption syncOp = - 
option( - null, - TestConstants.PARAM_SYNC_KEY, - new String[] {"-sk", "--synckey"}, - "specify sync key", - true, - ""); - private StdOption proxyUserOp = - option( - null, - TestConstants.PARAM_PROXY_USER, - new String[] {"-pu", "--proxy-user"}, - "specify proxy user", - true, - ""); - - private StdOption helpOp = - option(null, TestConstants.PARAM_HELP, new String[] {"-h", "--help"}, "help info", true, ""); - - private StdOption> confMap = - option( - null, - AppKeys.JOB_PARAM_CONF, - new String[] {"-confMap"}, - "confMap", - true, - new HashMap<>()); - - private StdOption filePara = - option( - null, - TestConstants.PARAM_COMMON_FILE, - new String[] {"--file", "-f"}, - "Spark SQL File to Execute!", - true, - ""); - - private StdOption commandPara = - option( - null, - TestConstants.PARAM_COMMON_CMD, - new String[] {"--cmd"}, - "Spark SQL Command to Execute!", - true, - ""); - - private StdOption argsPara = - option( - null, - TestConstants.PARAM_COMMON_ARGS, - new String[] {"--args", "-a"}, - "Set command args, k-v pairs delimited by comma, e.g. key1=value1,key2=value2,...", - true, - ""); - - private StdOption splitPara = - option( - null, - TestConstants.PARAM_COMMON_SPLIT, - new String[] {"--split", "-s"}, - "specify the split character string", - true, - ","); - - private StdOption queuePara = - option( - null, - TestConstants.PARAM_YARN_QUEUE, - new String[] {"--queue", "-q"}, - "specify the queue", - true, - "default"); - - private StdOption namePara = - option( - null, - TestConstants.PARAM_SPARK_NAME, - new String[] {"--name", "-n"}, - "specify the application name. WARNING:this option is deprecated. Linkis does not support this variable", - true, - ""); - - private StdOption> hiveconfPara = - option( - null, - TestConstants.PARAM_SPARK_HIVECONF, - new String[] {"--hiveconf", "-hc"}, - "specify the hiveconf setting,e.g. 
hive.cli.print.header=false", - true, - new HashMap<>()); - - private StdOption nePara = - option( - null, - TestConstants.PARAM_SPARK_NUM_EXECUTORS, - new String[] {"--num-executors", "-ne"}, - "specify the spark application container", - true, - 3); - - private StdOption ecPara = - option( - null, - TestConstants.PARAM_SPARK_EXECUTOR_CORES, - new String[] {"--executor-cores", "-ec"}, - "specify the spark application container vcores(less than queue's max vcores)", - true, - 2); - - private StdOption emPara = - option( - null, - TestConstants.PARAM_SPARK_EXECUTOR_MEMORY, - new String[] {"--executor-memory", "-em"}, - "specify the spark application executor's memory, 1.5G-2G/vcore", - true, - "4G"); - - private StdOption spPara = - option( - null, - TestConstants.PARAM_SPARK_SHUFFLE_PARTITIONS, - new String[] {"--shuffle-partitions", "-sp"}, - "specify the spark.sql.shuffle.partitions", - true, - 200); - - private StdOption> otherPara = - option( - null, - TestConstants.PARAM_COMMON_OTHER_KV, - new String[] {"--other"}, - "specify the other parameters", - true, - new HashMap<>()); - - // private CmdOption runTypePara = option(TestConstants.PARAM_SPARK_RUNTYPE, new - // String[]{"--runtype"}, - // "specify the runtype parameters: sql pyspark scala", true, "sql"); - - public TestSparkCmdTemplate() { - super(TestCmdType.SPARK); - } - - @Override - public void checkParams() throws CommandException {} - - @Override - protected Object clone() throws CloneNotSupportedException { - return super.clone(); - } - - @Override - public TestSparkCmdTemplate getCopy() { - return (TestSparkCmdTemplate) super.getCopy(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/execution/LinkisExecutorTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/execution/LinkisExecutorTest.java 
deleted file mode 100644 index b2571049d3e..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/execution/LinkisExecutorTest.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.execution; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class LinkisExecutorTest { - - @BeforeEach - public void before() {} - - @AfterEach - public void after() {} - - /** Method: setDriver(LinkisClientDriver driver) */ - @Test - public void testSetDriver() { - // TODO: Test goes here... - } - - /** Method: setDriverTransformer(DriverTransformer driverTransformer) */ - @Test - public void testSetDriverTransformer() { - // TODO: Test goes here... - } - - /** Method: getJobID(JobSubmitExec execData) */ - @Test - public void testGetJobID() { - // TODO: Test goes here... - } - - /** Method: submit(Job job) */ - @Test - public void testDoAsyncSubmit() { - // TODO: Test goes here... 
- } - - /** Method: updateJobStatus(JobSubmitExec execData) */ - @Test - public void testDoUpdateProgress() { - // TODO: Test goes here... - } - - /** Method: checkSubmit(JobSubmitExec execData) */ - @Test - public void testDoCheckSubmit() { - // TODO: Test goes here... - } - - /** Method: doGetFinalResult(JobSubmitExec execData) */ - @Test - public void testDoGetFinalResult() { - // TODO: Test goes here... - } - - /** Method: checkInit() */ - @Test - public void testCheckInit() { - // TODO: Test goes here... - } - - /** Method: doKill(Job job) */ - @Test - public void testDoKill() { - // TODO: Test goes here... - } - - /** Method: doQuery(Job job) */ - @Test - public void testDoQuery() { - // TODO: Test goes here... - } - - /** Method: doTransform(Job job) */ - @Test - public void testDoTransform() { - // TODO: Test goes here... - } - - /** Method: updateExecDataByDwsResult(JobSubmitExec execData, DWSResult result) */ - @Test - public void testUpdateExecDataByDwsResult() { - // TODO: Test goes here... 
- /* - try { - Method method = LinkisSubmitExecutor.getClass().getMethod("updateExecDataByDwsResult", JobSubmitExec.class, DWSResult.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobBuilderTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobBuilderTest.java deleted file mode 100644 index 99836e8ec6b..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobBuilderTest.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job; - -import org.apache.linkis.cli.application.data.ProcessedData; -import org.apache.linkis.cli.application.interactor.command.template.ProcessInputUtil; -import org.apache.linkis.cli.application.interactor.command.template.TestSparkCmdTemplate; -import org.apache.linkis.cli.core.interactor.job.JobBuilder; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class LinkisJobBuilderTest { - JobBuilder builder; - - @BeforeEach - public void before() { - String[] cmdStr = - new String[] { - "-u", - "hadoop", - "-pwd", - "1234", - "-c", - "src/test/resources/conf/user.properties", - "--cmd", - "show tables", - "--split", - "\',\'", - "--queue", - "q05", - "--name", - "testApp", - // "--hiveconf", "/path/...", - // "--num-executors", "4", - "--executor-cores", - "4", - "--executor-memory", - "4G", - "--shuffle-partitions", - "200", - "-confMap", - "kk=vv,kkk=vvv,spark.executor.instances=8" - }; - ProcessedData data = ProcessInputUtil.generateProcessedData(cmdStr, new TestSparkCmdTemplate()); - // builder = new LinkisJobBuilder() - // .setIdentifier(data.getIdentifier()) - // .setCmdType(data.getCmdType()) - // .setExecutionType(data.getExecutionType()) - // .setSubExecutionType(data.getSubExecutionType()) - // .setStdVarAccess(data.getStdVarAccess()) - // .setSysVarAccess(data.getSysVarAccess()); - } - - @AfterEach - public void after() {} - - /** Method: setIdentifier(String identifier) */ - @Test - public void testSetIdentifier() { - // TODO: Test goes here... - } - - /** Method: build() */ - @Test - public void testBuild() { - // TODO: Test goes here... 
- // LinkisJob job = builder.build(); - // System.out.println(CommonUtils.GSON.toJson(job)); - // assertEquals(job.getParamConfMap().get("spark.executor.instances"), "8"); - } - - /** Method: setStdVarAccess(VarAccess varAccess) */ - @Test - public void testSetStdVarAccess() { - // TODO: Test goes here... - } - - /** Method: setSysVarAccess(VarAccess varAccess) */ - @Test - public void testSetSysVarAccess() { - // TODO: Test goes here... - } - - /** Method: getTargetNewInstance() */ - @Test - public void testGetTargetNewInstance() { - // TODO: Test goes here... - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtilsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtilsTest.java deleted file mode 100644 index cc579bec177..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtilsTest.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.builder; - -import org.apache.linkis.cli.application.constants.AppKeys; - -import java.util.HashMap; -import java.util.Map; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class ProcessKeyUtilsTest { - - @Test - @DisplayName("removePrefixForKeysInMapTest") - public void removePrefixForKeysInMapTest() { - - Map map = new HashMap<>(); - map.put(AppKeys.JOB_PARAM_CONF, new Object()); - map.put("name", new Object()); - - Map stringObjectMap = ProcessKeyUtils.removePrefixForKeysInMap(map); - Assertions.assertTrue(1 == stringObjectMap.size()); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubTypeTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubTypeTest.java deleted file mode 100644 index 699eaaf8b08..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubTypeTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.subtype; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class LinkisManSubTypeTest { - - @Test - @DisplayName("enumTest") - public void enumTest() { - - String killName = LinkisManSubType.KILL.getName(); - String logName = LinkisManSubType.LOG.getName(); - String descName = LinkisManSubType.DESC.getName(); - String statusName = LinkisManSubType.STATUS.getName(); - String listName = LinkisManSubType.LIST.getName(); - String resultName = LinkisManSubType.RESULT.getName(); - - Assertions.assertEquals("kill", killName); - Assertions.assertEquals("log", logName); - Assertions.assertEquals("desc", descName); - Assertions.assertEquals("status", statusName); - Assertions.assertEquals("list", listName); - Assertions.assertEquals("result", resultName); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubTypeTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubTypeTest.java deleted file mode 100644 index f05d59700e7..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubTypeTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.subtype; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class LinkisSubmitSubTypeTest { - - @Test - @DisplayName("enumTest") - public void enumTest() { - - String submitName = LinkisSubmitSubType.SUBMIT.getName(); - Assertions.assertEquals("submit", submitName); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/OperatorUtilsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/OperatorUtilsTest.java deleted file mode 100644 index 53dd3591489..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/OperatorUtilsTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.operator; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class OperatorUtilsTest { - - @Test - @DisplayName("getNumOfLinesTest") - public void getNumOfLinesTest() { - - String str = "name, \n" + "names"; - int numOfLines = OperatorUtils.getNumOfLines(str); - - Assertions.assertTrue(2 == numOfLines); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/pom.xml deleted file mode 100644 index 4cf08171303..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/pom.xml +++ /dev/null @@ -1,29 +0,0 @@ - - - - 4.0.0 - - - org.apache.linkis - linkis-cli - ${revision} - - linkis-cli-common - jar - - diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/execution/Execution.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/execution/Execution.java deleted file mode 100644 index 23e99601eb9..00000000000 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/execution/Execution.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.common.entity.execution; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; - -import java.util.Map; - -public interface Execution { - /* - start execution - */ - ExecutionResult execute(Map jobs); - - /* - terminate execution(often used in shutdown hook) - */ - boolean terminate(Map jobs); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/Job.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/Job.java deleted file mode 100644 index 5339db3c090..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/Job.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor 
license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.common.entity.job; - -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.entity.present.PresentWay; - -public interface Job { - /** Linkis-cli specified id, not server-side returned job-id/task-id */ - String getCid(); - - /** - * Command Type for this Job, should be able to use this to find out corresponding {@link - * CmdTemplate} - */ - CmdType getCmdType(); - - /** specifies which kind of sub-execution: e.g. jobManagement: status/list/log/kill; */ - JobSubType getSubType(); - - /** - * input-param/config will be stored in JobDescription information contained by this - * data-structure should be passed to server - */ - JobDescription getJobDesc(); - - /** - * data generated during execution(e.g. job status, job id, log, result etc.) 
is stored here - * information contained by this data-structure can be further passed to server - */ - JobData getJobData(); - - /** operates lower level components(usually encapsulates a client) */ - JobOperator getJobOperator(); - - /** decide how result should be presented */ - PresentWay getPresentWay(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobData.java deleted file mode 100644 index 10d55e65412..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobData.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.job; - -public interface JobData { - JobStatus getJobStatus(); - - String getJobID(); - - String getUser(); - - String getMessage(); - - void setMessage(String message); - - Exception getException(); - - void setException(Exception e); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobDescription.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobDescription.java deleted file mode 100644 index f6fa3e32177..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobDescription.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.job; - -/** Should store all input argument and configurations */ -public interface JobDescription {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobSubType.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobSubType.java deleted file mode 100644 index c2c8539e4b1..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobSubType.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.job; - -public interface JobSubType { - - String getName(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/operator/JobOperator.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/operator/JobOperator.java deleted file mode 100644 index e4bef680126..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/operator/JobOperator.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.operator; - -public interface JobOperator {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/PresentWay.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/PresentWay.java deleted file mode 100644 index a91dd3d475b..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/PresentWay.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.present; - -public interface PresentWay {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionResult.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionResult.java deleted file mode 100644 index 3d5cdef7e35..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionResult.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.result; - -import org.apache.linkis.cli.common.entity.job.Job; - -import java.util.Map; - -public interface ExecutionResult { - Map getJobs(); - - ExecutionStatus getExecutionStatus(); - - void setExecutionStatus(ExecutionStatus executionStatus); - - Exception getException(); // TODO: put exception during execution in here and do not interrupt - // execution - - void setException( - Exception exception); // TODO: put exception during execution in here and do not interrupt - // execution -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionStatus.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionStatus.java deleted file mode 100644 index 744ce3433a9..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionStatus.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.result; - -public interface ExecutionStatus {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ResultHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ResultHandler.java deleted file mode 100644 index cf10cb2bf4c..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ResultHandler.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.result; - -public interface ResultHandler { - void process(ExecutionResult executionResult); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/validate/Validator.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/validate/Validator.java deleted file mode 100644 index c86e4752486..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/validate/Validator.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.validate; - -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; - -public interface Validator { - void doValidation(Object input) throws LinkisClientRuntimeException; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/version/Version.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/version/Version.java deleted file mode 100644 index 7f005b0bbdf..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/version/Version.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.version; - -public interface Version {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/handler/ExceptionHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/handler/ExceptionHandler.java deleted file mode 100644 index cb5a756ac35..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/handler/ExceptionHandler.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.exception.handler; - -public interface ExceptionHandler { - void handle(Exception e); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/test/java/org/apache/linkis/cli/common/exception/error/ErrorLevelTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/test/java/org/apache/linkis/cli/common/exception/error/ErrorLevelTest.java deleted file mode 100644 index e6d14f71ea5..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/test/java/org/apache/linkis/cli/common/exception/error/ErrorLevelTest.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.exception.error; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class ErrorLevelTest { - - @Test - @DisplayName("enumTest") - public void enumTest() { - - int infoLevel = ErrorLevel.INFO.getLevel(); - int warnLevel = ErrorLevel.WARN.getLevel(); - int errorLevel = ErrorLevel.ERROR.getLevel(); - int fatalLevel = ErrorLevel.FATAL.getLevel(); - int retryLevel = ErrorLevel.RETRY.getLevel(); - - Assertions.assertTrue(0 == infoLevel); - Assertions.assertTrue(1 == warnLevel); - Assertions.assertTrue(2 == errorLevel); - Assertions.assertTrue(3 == fatalLevel); - Assertions.assertTrue(4 == retryLevel); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/pom.xml deleted file mode 100644 index 4d453144fbc..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/pom.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - 4.0.0 - - - org.apache.linkis - linkis-cli - ${revision} - - linkis-cli-core - jar - - - - org.apache.linkis - linkis-cli-common - ${project.version} - - - - org.reflections - reflections - - - - - diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/AbstractBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/AbstractBuilder.java deleted file mode 100644 index 8063662cb59..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/AbstractBuilder.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.builder; - -public abstract class AbstractBuilder implements Builder { - protected T targetObj; - - public AbstractBuilder() { - reset(); - } - - protected void reset() { - targetObj = getTargetNewInstance(); - } - - @Override - public T build() { - T ret = targetObj; - reset(); - return ret; - } - - protected abstract T getTargetNewInstance(); - - // protected T getTargetNewInstance() { - // try { - // ParameterizedType pt = (ParameterizedType) this.getClass().getGenericSuperclass(); - // Class clazz = (Class) pt.getActualTypeArguments()[0]; - // targetObj = clazz.newInstance(); - // } catch (InstantiationException e) { - // throw new BuilderException("BLD0001", ErrorLevel.ERROR, - // CommonErrMsg.BuilderBuildErr, "Cannot generate new instance.", e); - // } catch (IllegalAccessException ie) { - // throw new BuilderException("BLD0001", ErrorLevel.ERROR, - // CommonErrMsg.BuilderBuildErr, "Cannot generate new instance.", ie); - // } - // return targetObj; - // } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/BuildableByVarAccess.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/BuildableByVarAccess.java deleted file mode 100644 index ca6454e9232..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/BuildableByVarAccess.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.builder; - -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.BuilderException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import java.lang.reflect.ParameterizedType; - -public abstract class BuildableByVarAccess extends AbstractBuilder { - protected VarAccess stdVarAccess; - protected VarAccess sysVarAccess; - - protected void checkInit() { - if (stdVarAccess == null || sysVarAccess == null) { - ParameterizedType pt = (ParameterizedType) this.getClass().getGenericSuperclass(); - Class clazz = (Class) pt.getActualTypeArguments()[0]; - throw new BuilderException( - "BLD0003", - ErrorLevel.ERROR, - CommonErrMsg.BuilderInitErr, - "Cannot init builder: " - + clazz.getCanonicalName() - + "Cause: stdVarAccess or sysVarAccess is null"); - } - stdVarAccess.checkInit(); - sysVarAccess.checkInit(); - } - - public BuildableByVarAccess setStdVarAccess(VarAccess varAccess) { - this.stdVarAccess = varAccess; - return this; - } - - public BuildableByVarAccess setSysVarAccess(VarAccess varAccess) { - this.sysVarAccess = varAccess; - return this; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/Builder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/Builder.java deleted file mode 100644 index e66ba2ee550..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/Builder.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.builder; - -public interface Builder { - T build(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonConstants.java deleted file mode 100644 index 1362539158a..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonConstants.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.constants; - -public class CommonConstants { - - public static final Long JOB_QUERY_SLEEP_MILLS = 2000l; - public static final Integer REQUEST_MAX_RETRY_TIME = 3; - - public static final String UNIVERSAL_SUBCMD = "linkis-cli"; - public static final String UNIVERSAL_SUBCMD_DESC = - "command for all types of jobs supported by Linkis"; - - public static final String SUCCESS_INDICATOR = "############Execute Success!!!########"; - public static final String FAILURE_INDICATOR = "############Execute Error!!!########"; - public static final String ARRAY_SEQ = "@#@"; - public static final String ARRAY_SEQ_REGEX = "(?=([^\"]*\"[^\"]*\")*[^\"]*$)"; - public static final int MAX_NUM_OF_COMMAND_ARGUEMENTS = 10; - public static final String CONFIG_DIR = "config.path"; - public static final String[] CONFIG_EXTENSION = {"properties"}; - public static final String SYSTEM_PROPERTIES_IDENTIFIER = "SYS_PROP"; - public static final String SYSTEM_ENV_IDENTIFIER = "SYS_ENV"; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonKeys.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonKeys.java deleted file mode 100644 index 39eb2f9418a..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonKeys.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.constants; - -public class CommonKeys {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandler.java deleted file mode 100644 index 25d7a04d202..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandler.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.exception.handler; - -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.exception.handler.ExceptionHandler; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.interactor.command.CmdTemplateFactory; -import org.apache.linkis.cli.core.present.HelpInfoPresenter; -import org.apache.linkis.cli.core.present.model.HelpInfoModel; - -public class CommandExceptionHandler implements ExceptionHandler { - @Override - public void handle(Exception e) { - if (e instanceof CommandException) { - if (((CommandException) e).requireHelp()) { - - CmdTemplate template = - CmdTemplateFactory.getTemplateOri(((CommandException) e).getCmdType()); - - if (template != null) { - HelpInfoModel model = new HelpInfoModel(); - model.buildModel(template); - - new HelpInfoPresenter().present(model, null); - } - } - } - new DefaultExceptionHandler().handle(e); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/DefaultExceptionHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/DefaultExceptionHandler.java deleted file mode 100644 index c83e711513e..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/DefaultExceptionHandler.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.exception.handler; - -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.handler.ExceptionHandler; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.exception.ExceptionUtils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DefaultExceptionHandler implements ExceptionHandler { - private static Logger logger = LoggerFactory.getLogger(DefaultExceptionHandler.class); - - @Override - public void handle(Exception exception) { - if (exception instanceof LinkisClientRuntimeException) { - LinkisClientRuntimeException e = (LinkisClientRuntimeException) exception; - switch (e.getLevel()) { - case INFO: - logger.info(e.getMessage(), e); - LogUtils.getInformationLogger().info(e.getMessage()); - break; - case WARN: - logger.warn(e.getMessage(), e); - LogUtils.getInformationLogger().warn(getErrInfoWithoutStacktrace(e)); - break; - case ERROR: - logger.error(e.getMessage(), e); - LogUtils.getInformationLogger().error(getErrInfoWithoutStacktrace(e)); - break; - case FATAL: - String msg = StringUtils.substringAfter(e.getMessage(), "[ERROR]"); - logger.error(msg, e); - LogUtils.getInformationLogger().error("[FATAL]" + msg, e); - System.exit(-1); - break; - } - - } else { - 
logger.error(exception.getMessage(), exception); - LogUtils.getInformationLogger().error(exception.getMessage(), exception); - } - } - - private String getErrInfoWithoutStacktrace(Exception e) { - if (e == null) { - return ""; - } - StringBuilder sb = new StringBuilder(); - if (e instanceof NullPointerException) { - sb.append(ExceptionUtils.getStackTrace(e)); - } else { - sb.append(e.getMessage()); - } - if (e.getCause() != null) { - sb.append(System.lineSeparator()) - .append("Caused by: ") - .append((e.getCause().getClass().getCanonicalName())) - .append(": ") - .append(e.getCause().getMessage()); - } - return sb.toString(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/AsyncSubmission.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/AsyncSubmission.java deleted file mode 100644 index 765c5969d4f..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/AsyncSubmission.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.execution; - -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ExecutionStatus; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.AsyncBackendJob; -import org.apache.linkis.cli.core.interactor.result.ExecutionResultImpl; -import org.apache.linkis.cli.core.interactor.result.ExecutionStatusEnum; -import org.apache.linkis.cli.core.utils.CommonUtils; - -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Execute job asynchronously TODO: put exception during execution in ExecutionResult and do not - * interrupt execution - */ -public class AsyncSubmission implements Execution { - private static final Logger logger = LoggerFactory.getLogger(AsyncSubmission.class); - - @Override - public ExecutionResult execute(Map jobs) { - ExecutionStatus executionStatus; - Exception exception = null; // TODO - - if (jobs == null || jobs.size() == 0) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Null or empty Jobs is submitted to current execution"); - } - - if (jobs.size() > 1) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Multiple Jobs is not Supported by current execution"); - } - - Job job = jobs.get(jobs.keySet().toArray(new String[jobs.size()])[0]); - - if (!(job instanceof AsyncBackendJob)) { - 
throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Backend for \"" + job.getClass().getCanonicalName() + "\" does not support async"); - } - - if (job.getSubType() == null) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "SubExecType should not be null"); - } - - try { - ((AsyncBackendJob) job).submit(); - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - ((AsyncBackendJob) job).updateJobStatus(); - if (job.getJobData().getJobStatus().isJobSubmitted()) { - executionStatus = ExecutionStatusEnum.SUCCEED; - } else { - executionStatus = ExecutionStatusEnum.FAILED; - if (job.getJobData().getException() != null) { - exception = job.getJobData().getException(); - } - } - } catch (Exception e) { - exception = e; - executionStatus = ExecutionStatusEnum.FAILED; - logger.warn("Failed to submit job.", e); - } - - return new ExecutionResultImpl(jobs, executionStatus, exception); - } - - @Override - public boolean terminate(Map jobs) { - return true; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/Help.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/Help.java deleted file mode 100644 index b2d34746c6b..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/Help.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.execution; - -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.CmdTemplateFactory; -import org.apache.linkis.cli.core.interactor.result.ExecutionResultImpl; -import org.apache.linkis.cli.core.interactor.result.ExecutionStatusEnum; -import org.apache.linkis.cli.core.present.HelpInfoPresenter; -import org.apache.linkis.cli.core.present.model.HelpInfoModel; - -import java.util.Map; - -public class Help implements Execution { - @Override - public ExecutionResult execute(Map jobs) { - - if (jobs.size() > 1) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Multiple Jobs is not Supported by current execution"); - } - - HelpInfoModel model = new HelpInfoModel(); - Job job = jobs.values().toArray(new Job[jobs.size()])[0]; - - model.buildModel(CmdTemplateFactory.getTemplateOri(job.getCmdType())); - - new HelpInfoPresenter().present(model, null); - return new ExecutionResultImpl(null, 
ExecutionStatusEnum.SUCCEED, null); - } - - @Override - public boolean terminate(Map jobs) { - return true; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/JobManagement.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/JobManagement.java deleted file mode 100644 index c4741bdf682..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/JobManagement.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.execution; - -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ExecutionStatus; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.ManagableBackendJob; -import org.apache.linkis.cli.core.interactor.result.ExecutionResultImpl; -import org.apache.linkis.cli.core.interactor.result.ExecutionStatusEnum; - -import java.util.Map; - -public class JobManagement implements Execution { - @Override - public ExecutionResult execute(Map jobs) { - ExecutionStatus executionStatus; - Exception exception = null; // TODO - - if (jobs == null || jobs.size() == 0) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Null or empty Jobs is submitted to current execution"); - } - - if (jobs.size() > 1) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Multiple Jobs is not Supported by current execution"); - } - - Job job = jobs.get(jobs.keySet().toArray(new String[jobs.size()])[0]); - - if (!(job instanceof ManagableBackendJob)) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Backend for \"" + job.getClass().getCanonicalName() + "\" is not manageable"); - } - - if (job.getSubType() == null) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "SubExecType should not be null"); - } - - try { - ((ManagableBackendJob) job).doManage(); - if (((ManagableBackendJob) job).isSuccess()) { - executionStatus = 
ExecutionStatusEnum.SUCCEED; - } else { - executionStatus = ExecutionStatusEnum.FAILED; - if (job.getJobData() != null && job.getJobData().getException() != null) { - exception = job.getJobData().getException(); - } - } - - } catch (Exception e) { - exception = e; - executionStatus = ExecutionStatusEnum.FAILED; - } - - return new ExecutionResultImpl(jobs, executionStatus, exception); - } - - @Override - public boolean terminate(Map jobs) { - return true; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/SyncSubmission.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/SyncSubmission.java deleted file mode 100644 index c143e223570..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/SyncSubmission.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.execution; - -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ExecutionStatus; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.*; -import org.apache.linkis.cli.core.interactor.result.ExecutionResultImpl; -import org.apache.linkis.cli.core.interactor.result.ExecutionStatusEnum; -import org.apache.linkis.cli.core.utils.CommonUtils; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Execute job synchronously. i.e. Client submit job, and wait til job finish, and get result, no - * matter what server behaves. 
- */ -public class SyncSubmission implements Execution { - private static final Logger logger = LoggerFactory.getLogger(SyncSubmission.class); - - @Override - public ExecutionResult execute(Map jobs) { - - ExecutionStatus executionStatus; - Exception exception = null; // TODO - - if (jobs == null || jobs.size() == 0) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Null or empty Jobs is submitted to current execution"); - } - - if (jobs.size() > 1) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Multiple Jobs is not Supported by current execution"); - } - - Job job = jobs.get(jobs.keySet().toArray(new String[jobs.size()])[0]); - - if (job instanceof SyncBackendJob) { - try { - ((SyncBackendJob) job).submitAndGetResult(); - } catch (Exception e) { - exception = e; - } - } else if (job instanceof AsyncBackendJob) { - try { - ExecWithAsyncBackend(job); - } catch (Exception e) { - exception = e; - // TODO: throw or fail - } - } else { - throw new LinkisClientExecutionException( - "EXE0002", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Executor Type: \"" + job.getClass().getCanonicalName() + "\" is not Supported"); - } - - if (job.getJobData() != null - && job.getJobData().getJobStatus() != null - && job.getJobData().getJobStatus().isJobSuccess()) { - executionStatus = ExecutionStatusEnum.SUCCEED; - } else if (job.getJobData().getJobStatus() == null - || !job.getJobData().getJobStatus().isJobFinishedState()) { - executionStatus = ExecutionStatusEnum.UNDEFINED; - if (job.getJobData().getException() != null) { - exception = job.getJobData().getException(); - } - } else { - executionStatus = ExecutionStatusEnum.FAILED; - if (job.getJobData().getException() != null) { - exception = job.getJobData().getException(); - } - } - - return new ExecutionResultImpl(jobs, executionStatus, exception); - } - - @Override - public boolean terminate(Map jobs) 
{ - boolean ok = true; - for (Job job : jobs.values()) { - if (job.getJobData() == null || job.getJobData().getJobStatus() == null) { - continue; - } - String jobId = job.getJobData().getJobID() == null ? "NULL" : job.getJobData().getJobID(); - if (job instanceof TerminatableJob) { - try { - ((TerminatableJob) job).terminate(); - } catch (Exception e) { - System.out.println( - "Failed to kill job: jobId=" + jobId + ". " + ExceptionUtils.getStackTrace(e)); - } - if (!job.getJobData().getJobStatus().isJobCancelled() - || !job.getJobData().getJobStatus().isJobFailure()) { - ok = false; - System.out.println( - "Failed to kill job: jobId=" - + jobId - + ", current status: " - + job.getJobData().getJobStatus().toString()); - } else { - System.out.println( - "Successfully killed job: jobId=" - + jobId - + ", current status: " - + job.getJobData().getJobStatus().toString()); - } - } else { - System.out.println("Job \"" + jobId + "\"" + "is not terminatable"); - } - } - return ok; - } - - private void ExecWithAsyncBackend(Job job) { - - if (!(job instanceof AsyncBackendJob)) { - throw new LinkisClientExecutionException( - "EXE0002", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "job is not instance of AsyncBackendJob"); - } - AsyncBackendJob submitJob = (AsyncBackendJob) job; - - submitJob.submit(); - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - - if (!submitJob.getJobData().getJobStatus().isJobSubmitted()) { - throw new LinkisClientExecutionException( - "EXE0005", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Retry exhausted checking job submission. 
Job is probably not submitted"); - } else { - // Output that job is submitted - StringBuilder infoBuilder = new StringBuilder(); - infoBuilder.append("Job is successfully submitted!").append(System.lineSeparator()); - LogUtils.getInformationLogger().info(infoBuilder.toString()); - } - - if (job instanceof LogAccessibleJob) { - /* - Non-blocking, call if back-end supports it - */ - ((LogAccessibleJob) job).startRetrieveLog(); - } - - submitJob.waitJobComplete(); - - if (submitJob.getJobData().getJobStatus().isJobFinishedState()) { - if (job instanceof ResultAccessibleJob) { - /* - Non-blocking, call if back-end supports it - */ - ((ResultAccessibleJob) job).startRetrieveResult(); - } - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AbstractJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AbstractJob.java deleted file mode 100644 index 0de5e27ce6a..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AbstractJob.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobSubType; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.entity.present.PresentWay; - -public abstract class AbstractJob implements Job { - protected String cid; - protected CmdType cmdType; - protected JobSubType subType; - protected JobOperator operator; - private PresentWay presentWay; - - @Override - public String getCid() { - return cid; - } - - public void setCid(String cid) { - this.cid = cid; - } - - @Override - public CmdType getCmdType() { - return cmdType; - } - - public void setCmdType(CmdType cmdType) { - this.cmdType = cmdType; - } - - @Override - public JobSubType getSubType() { - return subType; - } - - public void setSubType(JobSubType subType) { - this.subType = subType; - } - - @Override - public JobOperator getJobOperator() { - return operator; - } - - public void setOperator(JobOperator operator) { - this.operator = operator; - } - - @Override - public PresentWay getPresentWay() { - return presentWay; - } - - public void setPresentWay(PresentWay presentWay) { - this.presentWay = presentWay; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AsyncBackendJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AsyncBackendJob.java deleted file mode 100644 index 636a361cad5..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AsyncBackendJob.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobData; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; - -/** - * If backend supports async-submission, i.e. submit job and then return while job is running. Then - * implement this. 
Note that all results return by server should be returned but stored in {@link - * JobData} - */ -public interface AsyncBackendJob extends Job { - - void submit() throws LinkisClientRuntimeException; - - void updateJobStatus() throws LinkisClientRuntimeException; - - void waitJobComplete() throws LinkisClientRuntimeException; - - void terminate() throws LinkisClientRuntimeException; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/JobBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/JobBuilder.java deleted file mode 100644 index 566fe3b2796..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/JobBuilder.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.entity.job.JobData; -import org.apache.linkis.cli.common.entity.job.JobDescription; -import org.apache.linkis.cli.common.entity.job.JobSubType; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.core.builder.BuildableByVarAccess; - -public abstract class JobBuilder extends BuildableByVarAccess { - - public JobBuilder setCid(String cid) { - targetObj.setCid(cid); - return this; - } - - public JobBuilder setCmdType(CmdType cmdType) { - targetObj.setCmdType(cmdType); - return this; - } - - public JobBuilder setJobSubType(JobSubType subType) { - targetObj.setSubType(subType); - return this; - } - - protected abstract JobDescription buildJobDesc(); - - protected abstract JobData buildJobData(); - - protected abstract JobOperator buildJobOperator(); - - protected abstract PresentWay buildPresentWay(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/LogAccessibleJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/LogAccessibleJob.java deleted file mode 100644 index d372c444a34..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/LogAccessibleJob.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobData; - -/** If backend supports extracting log while job is running, then implement this interface */ -public interface LogAccessibleJob extends Job { - /** - * Retrieve log. This methods should be non-blocking and log should be appended into {@link - * JobData} - */ - void startRetrieveLog(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ManagableBackendJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ManagableBackendJob.java deleted file mode 100644 index 204134110b4..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ManagableBackendJob.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.job.JobSubType; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; - -/** - * Backend support job-management, e.g. get status, get log, kill job etc. Then implement this - * interface. - */ -public interface ManagableBackendJob extends Job { - /** - * since job management can diverge, we decide to assign it to lower-level implementation. - * implementation should use {@link JobSubType} to decide which action to take, hence {@link - * JobSubType} should not be null - */ - void doManage() throws LinkisClientRuntimeException; - - /** - * if execution is success. This can be different from {@link JobStatus} e.g. 
query job status, - * job may be FAIL but execution is a asuccess - */ - boolean isSuccess(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ResultAccessibleJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ResultAccessibleJob.java deleted file mode 100644 index 564ed03e108..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ResultAccessibleJob.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobData; - -public interface ResultAccessibleJob extends Job { - /** - * Retrieve reault-set. 
This methods should be non-blocking and result should be appended into - * {@link JobData} - */ - void startRetrieveResult(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/SyncBackendJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/SyncBackendJob.java deleted file mode 100644 index d18b81b91d0..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/SyncBackendJob.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; - -/** - * Backend only supports sync-submission, i.e. submit and wait till job finish and get result in one - * call, then implement this interface. 
- */ -public interface SyncBackendJob extends Job { - void submitAndGetResult() throws LinkisClientRuntimeException; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminatableJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminatableJob.java deleted file mode 100644 index ff7e832724f..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminatableJob.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; - -/** Backend supports killing job */ -public interface TerminatableJob extends Job { - /** for jobs that starts a new thread */ - TerminateToken getTerminateToken(); - - void terminate() throws LinkisClientRuntimeException; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminateToken.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminateToken.java deleted file mode 100644 index e8d81a430d8..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminateToken.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.job; - -import java.util.concurrent.atomic.AtomicBoolean; - -public class TerminateToken { - private AtomicBoolean shouldTerminate = new AtomicBoolean(false); - - public boolean shouldTerminate() { - return shouldTerminate.get(); - } - - public void setTerminate() { - shouldTerminate.set(true); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropertiesLoader.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropertiesLoader.java deleted file mode 100644 index df79c8262ea..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropertiesLoader.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.properties; - -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; - -public interface PropertiesLoader { - PropertiesLoader setPropertiesReaders(PropertiesReader[] readers); - - PropertiesLoader addPropertiesReader(PropertiesReader reader); - - PropertiesLoader addPropertiesReaders(PropertiesReader[] readers); - - void removePropertiesReader(String identifier); - - ClientProperties[] loadProperties(); - - void checkInit(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoader.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoader.java deleted file mode 100644 index f6917206ab5..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoader.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.properties; - -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PropsException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; - -import java.util.*; - -public class StdPropsLoader implements PropertiesLoader { - Map readersMap; - - public StdPropsLoader() { - this.readersMap = new HashMap<>(); - } - - @Override - public PropertiesLoader setPropertiesReaders(PropertiesReader[] readers) { - this.readersMap = new HashMap<>(); - for (PropertiesReader reader : readers) { - readersMap.put(reader.getPropsId(), reader); - } - return this; - } - - @Override - public PropertiesLoader addPropertiesReader(PropertiesReader reader) { - if (reader != null) { - readersMap.put(reader.getPropsId(), reader); - } - return this; - } - - @Override - public PropertiesLoader addPropertiesReaders(PropertiesReader[] readers) { - if (readers != null && readers.length > 0) { - for (PropertiesReader reader : readers) { - readersMap.put(reader.getPropsId(), reader); - } - } - return this; - } - - @Override - public void removePropertiesReader(String identifier) { - readersMap.remove(identifier); - } - - @Override - public ClientProperties[] loadProperties() { - checkInit(); - List propsList = new ArrayList<>(); - PropertiesReader readerTmp; - for (Map.Entry entry : readersMap.entrySet()) { - readerTmp = entry.getValue(); - Properties props = readerTmp.getProperties(); - ClientProperties clientProperties = new ClientProperties(); - clientProperties.putAll(props); - clientProperties.setPropsId(readerTmp.getPropsId()); - clientProperties.setPropertiesSourcePath(readerTmp.getPropsPath()); - propsList.add(clientProperties); - } - return propsList.toArray(new ClientProperties[propsList.size()]); - } - - @Override - 
public void checkInit() { - if (readersMap == null || readersMap.size() == 0) { - throw new PropsException( - "PRP0003", - ErrorLevel.ERROR, - CommonErrMsg.PropsLoaderInitErr, - "properties loader is not inited because it contains no reader"); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysEnvReader.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysEnvReader.java deleted file mode 100644 index b89ebe97552..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysEnvReader.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.properties.reader; - -import org.apache.linkis.cli.core.constants.CommonConstants; - -import java.util.Properties; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SysEnvReader implements PropertiesReader { - private static final Logger logger = LoggerFactory.getLogger(SysEnvReader.class); - private String propsId = CommonConstants.SYSTEM_ENV_IDENTIFIER; - private String propsPath = "SYSTEM"; - - @Override - public String getPropsId() { - return propsId; - } - - @Override - public PropertiesReader setPropsId(String identifier) { - this.propsId = identifier; - return this; - } - - @Override - public String getPropsPath() { - return propsPath; - } - - @Override - public PropertiesReader setPropsPath(String propertiesPath) { - this.propsPath = propertiesPath; - return this; - } - - @Override - public Properties getProperties() { - checkInit(); - Properties props = new Properties(); - props.putAll(System.getenv()); - return props; - } - - @Override - public void checkInit() {} -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysPropsReader.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysPropsReader.java deleted file mode 100644 index bf4bbc02700..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysPropsReader.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.properties.reader; - -import org.apache.linkis.cli.core.constants.CommonConstants; - -import java.util.Properties; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SysPropsReader implements PropertiesReader { - private static final Logger logger = LoggerFactory.getLogger(SysPropsReader.class); - private String propsId = CommonConstants.SYSTEM_PROPERTIES_IDENTIFIER; - private String propsPath = "SYSTEM"; - - @Override - public String getPropsId() { - return propsId; - } - - @Override - public PropertiesReader setPropsId(String identifier) { - this.propsId = identifier; - return this; - } - - @Override - public String getPropsPath() { - return propsPath; - } - - @Override - public PropertiesReader setPropsPath(String propertiesPath) { - this.propsPath = propertiesPath; - return this; - } - - @Override - public Properties getProperties() { - checkInit(); - return System.getProperties(); - } - - @Override - public void checkInit() {} -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/DefaultResultHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/DefaultResultHandler.java deleted file mode 100644 index 836aeb7ad82..00000000000 
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/DefaultResultHandler.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.result; - -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ResultHandler; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.utils.LogUtils; - -import static java.lang.System.exit; - -/** exit -1 when failure and exit 0 when success */ -public class DefaultResultHandler implements ResultHandler { - @Override - public void process(ExecutionResult executionResult) { - if (executionResult.getExecutionStatus() == ExecutionStatusEnum.SUCCEED) { - LogUtils.getPlaintTextLogger().info(CommonConstants.SUCCESS_INDICATOR); - exit(0); - } else { - LogUtils.getPlaintTextLogger().info(CommonConstants.FAILURE_INDICATOR); - exit(-1); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionResultImpl.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionResultImpl.java deleted file mode 100644 index b145ba9c8bf..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionResultImpl.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.result; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ExecutionStatus; - -import java.util.Map; - -public class ExecutionResultImpl implements ExecutionResult { - - Map jobsMap; - ExecutionStatus executionStatus; - Exception exception; - - public ExecutionResultImpl(Map jobsMap, ExecutionStatus executionStatus) { - this.jobsMap = jobsMap; - this.executionStatus = executionStatus; - } - - public ExecutionResultImpl( - Map jobsMap, ExecutionStatus executionStatus, Exception exception) { - this.jobsMap = jobsMap; - this.executionStatus = executionStatus; - this.exception = exception; - } - - @Override - public Map getJobs() { - return jobsMap; - } - - @Override - public ExecutionStatus getExecutionStatus() { - return this.executionStatus; - } - - @Override - public void setExecutionStatus(ExecutionStatus executionStatus) { - this.executionStatus = executionStatus; - } - - @Override - public Exception getException() { - return this.exception; - } - - @Override - public void setException(Exception exception) { - this.exception = exception; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionStatusEnum.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionStatusEnum.java deleted file mode 100644 index 0c4375b3d17..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionStatusEnum.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.result; - -import org.apache.linkis.cli.common.entity.result.ExecutionStatus; - -public enum ExecutionStatusEnum implements ExecutionStatus { - UNDEFINED("Inited", 1), - SUCCEED("Succeed", 2), - FAILED("Failed", 3); - - private String name; - private int id; - - ExecutionStatusEnum(String name, int id) { - this.name = name; - this.id = id; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/PresentResultHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/PresentResultHandler.java deleted file mode 100644 index 5a3fbfa4a2f..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/PresentResultHandler.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.result; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ResultHandler; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class PresentResultHandler implements ResultHandler { - private static Logger logger = LoggerFactory.getLogger(PresentResultHandler.class); - Presenter presenter; - Model model; - - public void checkInit() { - if (presenter == null || model == null) { - throw new LinkisClientExecutionException( - "EXE0031", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionResultErr, - "Presenter or model is null"); - } - } - - public void setPresenter(Presenter presenter) { - this.presenter = presenter; - } - - public void setModel(Model model) { - this.model = model; - } - - @Override - public void process(ExecutionResult executionResult) { - checkInit(); - Map jobs = executionResult.getJobs(); - // Probably need modification if we further want 
multiple-jobs support - // but we probably don't want to support that - if (jobs != null) { - for (Job job : jobs.values()) { - if (job != null) { - model.buildModel(job.getJobData()); - } - try { - presenter.present(model, job.getPresentWay()); - } catch (Exception e) { - logger.error("Execution failed because exception thrown when presenting data.", e); - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - executionResult.setException(e); - } - } - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParsedTplValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParsedTplValidator.java deleted file mode 100644 index e7f14e2df1b..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParsedTplValidator.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.validate; - -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import java.text.MessageFormat; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * 1. Check if there is missing or unknown option. 2. Call checkParam method for command-specific - * validation. - */ -public class ParsedTplValidator implements Validator { - private static final Logger logger = LoggerFactory.getLogger(ParsedTplValidator.class); - - @Override - public void doValidation(Object input) throws CommandException { - if (!(input instanceof CmdTemplate)) { - throw new ValidateException( - "VLD0006", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Input of ParsedTplValidator is not instance of CmdTemplate"); - } - - CmdTemplate parsedTemplateCopy = (CmdTemplate) input; - - String msg = "start validating command \"{0}\", template \"{1}\""; - logger.info( - MessageFormat.format( - msg, parsedTemplateCopy.getCmdType().getName(), parsedTemplateCopy.getCmdType())); - - checkOptions(parsedTemplateCopy); - - logger.info("Start params-check"); - parsedTemplateCopy.checkParams(); - logger.info("params-check ok."); - } - - /** Validation */ - private void checkOptions(CmdTemplate template) throws CommandException { - List> options = template.getOptions(); - for (CmdOption cmdOption : options) { - if (!cmdOption.hasVal() && !cmdOption.isOptional()) { - throw new ValidateException( - "VLD0003", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "CmdOption value cannot be empty: paramName:" - + 
cmdOption.getParamName() - + "CmdType: " - + template.getCmdType()); - } - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/PropsValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/PropsValidator.java deleted file mode 100644 index 6231f4eb886..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/PropsValidator.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.validate; - -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; - -public class PropsValidator implements Validator { - @Override - public void doValidation(Object input) throws LinkisClientRuntimeException { - if (!(input instanceof ClientProperties)) { - // TODO:throw - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/StdVarAccess.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/StdVarAccess.java deleted file mode 100644 index 267dae23252..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/StdVarAccess.java +++ /dev/null @@ -1,326 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.var; - -import org.apache.linkis.cli.common.entity.command.ParamItem; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.VarAccessException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.SpecialMap; -import org.apache.linkis.cli.core.utils.converter.AbstractStringConverter; -import org.apache.linkis.cli.core.utils.converter.PredefinedStringConverters; - -import org.apache.commons.lang3.StringUtils; - -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StdVarAccess implements VarAccess { - private static Logger logger = LoggerFactory.getLogger(StdVarAccess.class); - private Params cmdParams; - private ClientProperties userConf; - private ClientProperties defaultConf; - private Map subMapCache; - - public StdVarAccess setCmdParams(Params cmdParams) { - this.cmdParams = cmdParams; - return this; - } - - public Params getSubParam(String identifier) { - return this.cmdParams; - } - - public StdVarAccess setUserConf(ClientProperties userConf) { - this.userConf = userConf; - return this; - } - - public ClientProperties getUserConf(String identifier) { - return this.userConf; - } - - public StdVarAccess setDefaultConf(ClientProperties defaultConf) { - this.defaultConf = defaultConf; - return this; - } - - public ClientProperties getDefaultConf(String identifier) { - return this.defaultConf; - } - - public StdVarAccess init() { - this.subMapCache = new HashMap<>(); - putSubMapCache(subMapCache, cmdParams); - return this; - } - - private void putSubMapCache(Map subMapCache, Params param) { - for (ParamItem item : param.getParamItemMap().values()) { - // scan 
through all map type value and try get value for key - if (item.getValue() != null - && item.hasVal() - && item.getValue() instanceof Map - && !(item.getValue() instanceof SpecialMap)) { - try { - Map subMap = (Map) item.getValue(); - for (Map.Entry entry : subMap.entrySet()) { - if (subMapCache.containsKey(item.getKey())) { - logger.warn( - "Value of duplicated key \"{}\" in subMap \"{}\" will be ignored.", - item.getKey(), - item.getKey()); - } else if (StringUtils.isNotBlank(entry.getKey()) - && StringUtils.isNotBlank(entry.getValue())) { - subMapCache.put(entry.getKey(), entry.getValue()); - } - } - } catch (ClassCastException e) { - logger.warn( - "Param: {} has an unsupported Map type(not Map). It wiil be ignored", - item.getKey()); - } - } - } - } - - @Override - public void checkInit() { - if (this.cmdParams == null || this.defaultConf == null || this.subMapCache == null) { - throw new VarAccessException( - "VA0002", - ErrorLevel.ERROR, - CommonErrMsg.VarAccessInitErr, - "stdVarAccess is not inited. " - + "cmdParams: " - + cmdParams - + "defaultConf: " - + defaultConf - + "subMapCache: " - + subMapCache); - } - } - - @Override - public T getVarOrDefault(Class clazz, String key, T defaultValue) { - - if (StringUtils.isBlank(key)) { - return null; - } - - T val = getVar(clazz, key); - - return val != null ? val : defaultValue; - } - - @Override - public T getVar(Class clazz, String key) { - checkInit(); - if (key == null || StringUtils.isBlank(key)) { - return null; - } - T p1 = getVarFromParam(clazz, key, cmdParams); - - T pd1 = getDefaultVarFromParam(clazz, key, cmdParams); - - T c1 = getVarFromCfg(clazz, key, userConf); - T c2 = getVarFromCfg(clazz, key, defaultConf); - - return p1 != null ? p1 : c1 != null ? c1 : c2 != null ? 
c2 : pd1; - } - - private T getVarFromParam(Class clazz, String key, Params params) { - if (params == null || StringUtils.isBlank(key)) { - return null; - } - - Object v1 = - params.getParamItemMap().containsKey(key) && params.getParamItemMap().get(key).hasVal() - ? setNullIfEmpty(params.getParamItemMap().get(key).getValue()) - : null; - - Object v2 = setNullIfEmpty(convertStringVal(clazz, subMapCache.getOrDefault(key, null))); - - // extraParam has lower priority - Object v3 = - params.getExtraProperties() == null - ? null - : setNullIfEmpty(params.getExtraProperties().getOrDefault(key, null)); - - Object retObj = v1 != null ? v1 : v2 != null ? v2 : v3; - - return clazz.cast(retObj); - } - - private boolean paramHasVar(String key, Params params) { - boolean b1 = - params.getParamItemMap().containsKey(key) && params.getParamItemMap().get(key).hasVal(); - boolean b2 = subMapCache.containsKey(key); - boolean b3 = params.getExtraProperties().containsKey(key); - return b1 || b2 || b3; - } - - private T getDefaultVarFromParam(Class clazz, String key, Params params) { - if (params == null || StringUtils.isBlank(key) || !params.getParamItemMap().containsKey(key)) { - return null; - } - - Object vd = setNullIfEmpty(params.getParamItemMap().get(key).getDefaultValue()); - - return clazz.cast(vd); - } - - private T getVarFromCfg(Class clazz, String key, ClientProperties conf) { - - if (conf == null) { - return null; - } - Object val = conf.get(key); - if (val == null) { - return null; - } - String strVal; - try { - strVal = (String) val; - } catch (ClassCastException e) { - throw new VarAccessException( - "VA0003", - ErrorLevel.ERROR, - CommonErrMsg.VarAccessErr, - "Cannot getVar \"" + key + "\" from config. 
Cause: value is not String"); - } - - return convertStringVal(clazz, strVal); - } - - private Object setNullIfEmpty(Object obj) { - Object ret; - if (obj instanceof String && StringUtils.isBlank((String) obj)) { - ret = null; - } else if (obj instanceof Map && ((Map) obj).size() == 0) { - ret = null; - } else if (obj instanceof Collections && ((Collection) obj).size() == 0) { - ret = null; - } else { - ret = obj; - } - return ret; - } - - private boolean cfgHasVar(String key, ClientProperties conf) { - return conf == null ? false : conf.containsKey(key); - } - - private T convertStringVal(Class clazz, String strVal) { - Object ret; - if (StringUtils.isBlank(strVal)) { - return null; - } - if (clazz == Object.class) { - ret = strVal; - } else if (clazz == String.class) { - ret = convertGivenConverter(strVal, PredefinedStringConverters.NO_CONVERTER); - } else if (clazz == Integer.class) { - ret = convertGivenConverter(strVal, PredefinedStringConverters.INT_CONVERTER); - } else if (clazz == Long.class) { - ret = convertGivenConverter(strVal, PredefinedStringConverters.LONG_CONVERTER); - } else if (clazz == Boolean.class) { - ret = convertGivenConverter(strVal, PredefinedStringConverters.BOOLEAN_CONVERTER); - } else if (Map.class.isAssignableFrom(clazz)) { - // TODO: throw or return null if not string map - ret = null; - // convertGivenConverter(strVal, - // PredefinedStringConverters.STRING_MAP_CONVERTER); - } else if (clazz == String[].class) { - ret = null; - // ret = convertGivenConverter(strVal, - // PredefinedStringConverters.STR_ARRAY_CONVERTER); - } else { - throw new VarAccessException( - "VA0004", - ErrorLevel.ERROR, - CommonErrMsg.VarAccessErr, - "Cannot convertStringVal \"" - + strVal - + "\" to " - + clazz.getCanonicalName() - + ": designated type is not supported"); - } - return clazz.cast(ret); - } - - private T convertGivenConverter(String strVal, AbstractStringConverter converter) { - return converter.convert(strVal); - } - - @Override - public 
String[] getAllVarKeys() { - List varKeys = new ArrayList<>(); - - addParamVarKeys(varKeys, cmdParams); - - addPropsVarKeys(varKeys, userConf); - addPropsVarKeys(varKeys, defaultConf); - - return varKeys.toArray(new String[varKeys.size()]); - } - - private void addParamVarKeys(List varKeys, Params param) { - if (param != null) { - for (String key : param.getParamItemMap().keySet()) { - if (!varKeys.contains(key)) { - varKeys.add(key); - } - } - for (String key : subMapCache.keySet()) { - // scan through all map type value and try add key - if (!varKeys.contains(key)) { - varKeys.add(key); - } - } - for (String key : param.getExtraProperties().keySet()) { - if (!varKeys.contains(key)) { - varKeys.add(key); - } - } - } - } - - private void addPropsVarKeys(List varKeys, ClientProperties props) { - if (props != null) { - for (Object key : props.keySet()) { - if (!varKeys.contains(key)) { - varKeys.add((String) key); - } - } - } - } - - @Override - public boolean hasVar(String key) { - boolean b1 = paramHasVar(key, cmdParams); - boolean b2 = cfgHasVar(key, userConf); - boolean b3 = cfgHasVar(key, defaultConf); - return b1 || b2 || b3; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/SysVarAccess.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/SysVarAccess.java deleted file mode 100644 index d291e1bda57..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/SysVarAccess.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.var; - -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.VarAccessException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SysVarAccess implements VarAccess { - private static Logger logger = LoggerFactory.getLogger(SysVarAccess.class); - private ClientProperties sysProp; - private ClientProperties sysEnv; - - public SysVarAccess setSysProp(ClientProperties sysProp) { - this.sysProp = sysProp; - return this; - } - - public ClientProperties getSysProp(String identifier) { - return this.sysProp; - } - - public SysVarAccess setSysEnv(ClientProperties sysEnv) { - this.sysEnv = sysEnv; - return this; - } - - public ClientProperties getSysEnv(String identifier) { - return this.sysEnv; - } - - @Override - public void checkInit() { - if (this.sysProp == null && this.sysEnv == null) { - throw new VarAccessException( - "VA0001", - ErrorLevel.ERROR, - CommonErrMsg.VarAccessInitErr, - "sys_prop and sys_env are both null"); - } - } - - @Override - public T getVar(Class clazz, String key) { - checkInit(); - if 
(clazz != String.class) { - // throw exception - } - Object o1 = sysProp.get(key); - Object o2 = sysEnv.get(key); - if (o1 != null && o2 != null) { - throw new VarAccessException( - "VA0002", - ErrorLevel.WARN, - CommonErrMsg.VarAccessErr, - "same key occurred in sys_prop and sys_env. will use sys_prop"); - } - Object ret = o1 != null ? o1 : o2; - return clazz.cast(ret); - } - - @Override - public T getVarOrDefault(Class clazz, String key, T defaultValue) { - T ret = getVar(clazz, key); - if (ret == null) { - ret = defaultValue; - } - return ret; - } - - @Override - public String[] getAllVarKeys() { - List varKeys = new ArrayList<>(); - if (sysProp != null) { - for (Object key : sysProp.keySet()) { - varKeys.add((String) key); - } - } - if (sysEnv != null) { - for (Object key : sysEnv.keySet()) { - varKeys.add((String) key); - } - } - return varKeys.toArray(new String[varKeys.size()]); - } - - @Override - public boolean hasVar(String key) { - return sysEnv.containsKey(key) || sysProp.containsKey(key); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorBuilder.java deleted file mode 100644 index 8727ec495e9..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorBuilder.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.operator; - -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.core.builder.BuildableByVarAccess; - -public abstract class JobOperatorBuilder extends BuildableByVarAccess { - @Override - public JobOperatorBuilder setStdVarAccess(VarAccess varAccess) { - return (JobOperatorBuilder) super.setStdVarAccess(varAccess); - } - - @Override - public JobOperatorBuilder setSysVarAccess(VarAccess varAccess) { - return (JobOperatorBuilder) super.setSysVarAccess(varAccess); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorFactory.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorFactory.java deleted file mode 100644 index dd6031d66ab..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorFactory.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.operator; - -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.locks.ReentrantLock; - -public class JobOperatorFactory { - private static Map builderMap = new ConcurrentHashMap<>(); - - private static Map instanceMap = new ConcurrentHashMap<>(); // for singleton - private static Map lockMap = new ConcurrentHashMap<>(); // for singleton - - public static synchronized void register(String name, JobOperatorBuilder builder) - throws Exception { - if (builderMap.containsKey(name) - || lockMap.containsKey(name) - || instanceMap.containsKey(name)) { - throw new LinkisClientExecutionException( - "EXE0027", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Attempting to register a duplicate jobOperator, name: " + name); - } - builderMap.put(name, builder); - lockMap.put(name, new ReentrantLock()); - } - - public static synchronized void remove(String name) { - builderMap.remove(name); - instanceMap.remove(name); - lockMap.remove(name); - } - - public static JobOperator getReusable(String name) throws Exception { - JobOperatorBuilder builder = builderMap.get(name); - ReentrantLock lock = lockMap.get(name); - JobOperator instance = instanceMap.get(name); - if 
(lock == null || builder == null) { - throw new LinkisClientExecutionException( - "EXE0028", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Failed to get a reusable joboperator, name: " + name); - } - if (instance == null) { - boolean ok = lock.tryLock(500, TimeUnit.MILLISECONDS); - if (!ok) { - throw new LinkisClientExecutionException( - "EXE0028", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Failed to get a reusable joboperator, name: " + name); - } - if (instance == null) { - instance = builder.build(); - instanceMap.put(name, instance); - } - lock.unlock(); - } - return instance; - } - - public static JobOperator getNew(String name) throws Exception { - JobOperatorBuilder builder = builderMap.get(name); - if (builder == null) { - throw new Exception("TODO"); // TODO - } - return builder.build(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/DefaultStdOutPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/DefaultStdOutPresenter.java deleted file mode 100644 index 7ed2307520d..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/DefaultStdOutPresenter.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present; - -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.core.present.display.DisplayOperator; -import org.apache.linkis.cli.core.present.display.StdOutWriter; -import org.apache.linkis.cli.core.present.display.data.StdoutDisplayData; -import org.apache.linkis.cli.core.utils.CommonUtils; - -public class DefaultStdOutPresenter implements Presenter { - DisplayOperator driver = new StdOutWriter(); - - @Override - public void present(Model model, PresentWay presentWay) { - driver.doOutput(new StdoutDisplayData(CommonUtils.GSON.toJson(model))); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/HelpInfoPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/HelpInfoPresenter.java deleted file mode 100644 index 186081a5600..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/HelpInfoPresenter.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present; - -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.template.option.MapOption; -import org.apache.linkis.cli.core.interactor.command.template.option.Parameter; -import org.apache.linkis.cli.core.interactor.command.template.option.StdOption; -import org.apache.linkis.cli.core.present.display.DisplayOperator; -import org.apache.linkis.cli.core.present.display.StdOutWriter; -import org.apache.linkis.cli.core.present.display.data.StdoutDisplayData; -import org.apache.linkis.cli.core.present.model.HelpInfoModel; - -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class HelpInfoPresenter implements Presenter { - private static Logger logger = LoggerFactory.getLogger(HelpInfoPresenter.class); - DisplayOperator driver = new StdOutWriter(); - - @Override - public void 
present(Model model, PresentWay presentWay) { - if (!(model instanceof HelpInfoModel)) { - throw new PresenterException( - "PST0010", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Input for HelpInfoPresenter is not instance of model"); - } - if (driver == null) { - throw new PresenterException( - "PST0007", ErrorLevel.ERROR, CommonErrMsg.PresenterErr, "Driver is null"); - } - HelpInfoModel helpInfoModel = (HelpInfoModel) model; - - String helpInfo = getUsage(helpInfoModel.getTemplate()); - - driver.doOutput(new StdoutDisplayData(helpInfo)); - } - - /** Help info for sub-command */ - private String getUsage(CmdTemplate template) { - StringBuilder sb = new StringBuilder(); - List> options = template.getOptions(); - List> stdOptions = new ArrayList<>(); - List> parameters = new ArrayList<>(); - List> mapOptions = new ArrayList<>(); - for (CmdOption o : options) { - if (o instanceof StdOption) { - stdOptions.add(o); - } else if (o instanceof Parameter) { - parameters.add(o); - } else if (o instanceof MapOption) { - mapOptions.add(o); - } - } - - sb.append("Usage: ") - .append(template.getCmdType().getName()) - .append(options.size() > 0 ? " [OPTIONS] " : " "); - for (CmdOption p : parameters) { - if (p instanceof Parameter) { - sb.append(((Parameter) p).repr()).append(" "); - } - } - if (!"".equals(template.getCmdType().getDesc())) { - sb.append("\n\t").append(template.getCmdType().getDesc()); - } - - sb.append(options.size() > 0 ? "\nOptions:\n" : "\n"); - for (CmdOption o : stdOptions) { - sb.append(o.toString()).append("\n"); - } - - sb.append(options.size() > 0 ? "\nMapOptions:\n" : "\n"); - for (CmdOption o : mapOptions) { - sb.append(o.toString()).append("\n"); - } - - sb.append(parameters.size() > 0 ? 
"Parameters:\n" : "\n"); - for (CmdOption p : parameters) { - sb.append(p.toString()).append("\n"); - } - - return sb.toString(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentMode.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentMode.java deleted file mode 100644 index 6d26a2eb497..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentMode.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.present; - -public interface PresentMode { - String getName(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentModeImpl.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentModeImpl.java deleted file mode 100644 index aee032e3a4d..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentModeImpl.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.present; - -public enum PresentModeImpl implements PresentMode { - STDOUT("stdout", 0), - TEXT_FILE("text_file", 1); - - private String name; - private int id; - - PresentModeImpl(String name, int id) { - this.name = name; - this.id = id; - } - - @Override - public String getName() { - return this.name(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentWayImpl.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentWayImpl.java deleted file mode 100644 index 6fbe4e1ec02..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentWayImpl.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.present; - -import org.apache.linkis.cli.common.entity.present.PresentWay; - -public class PresentWayImpl implements PresentWay { - - private PresentMode mode; - private String path; - private boolean displayMetaAndLogo = true; - - public String getPath() { - return path; - } - - public void setPath(String path) { - this.path = path; - } - - public boolean isDisplayMetaAndLogo() { - return displayMetaAndLogo; - } - - public void setDisplayMetaAndLogo(boolean displayMetaAndLogo) { - this.displayMetaAndLogo = displayMetaAndLogo; - } - - public PresentMode getMode() { - return mode; - } - - public void setMode(PresentMode mode) { - this.mode = mode; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperFactory.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperFactory.java deleted file mode 100644 index 9f584f4505d..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperFactory.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present.display; - -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.PresentMode; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -public class DisplayOperFactory { - private static final Map operatorMap = new ConcurrentHashMap<>(); - - public static synchronized void register(PresentMode mode, DisplayOperator operator) - throws Exception { - if (operatorMap.containsKey(mode.getName())) { - throw new PresenterException( - "PST0012", - ErrorLevel.ERROR, - CommonErrMsg.PresenterInitErr, - "Attempting to register a duplicate DisplayOperator, name: " + mode.getName()); - } - operatorMap.put(mode.getName(), operator); - } - - public static synchronized void remove(PresentMode mode) { - operatorMap.remove(mode.getName()); - } - - public static DisplayOperator getDisplayOper(PresentMode mode) { - return operatorMap.get(mode.getName()); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperator.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperator.java deleted file mode 100644 index 228b75856d5..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperator.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present.display; - -import org.apache.linkis.cli.core.present.display.data.DisplayData; - -public interface DisplayOperator { - void doOutput(DisplayData data); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/PlainTextFileWriter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/PlainTextFileWriter.java deleted file mode 100644 index 789b3445011..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/PlainTextFileWriter.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present.display; - -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.display.data.DisplayData; -import org.apache.linkis.cli.core.present.display.data.FileDisplayData; - -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileOutputStream; -import java.io.OutputStreamWriter; - -public class PlainTextFileWriter implements DisplayOperator { - @Override - public void doOutput(DisplayData data) { - if (!(data instanceof FileDisplayData)) { - throw new PresenterException( - "PST0004", - ErrorLevel.ERROR, - CommonErrMsg.PresentDriverErr, - "input data is not instance of FileDisplayData"); - } - - String pathName = ((FileDisplayData) data).getPathName(); - String fileName = pathName + File.separator + ((FileDisplayData) data).getFileName(); - String content = ((FileDisplayData) data).getContent(); - Boolean overWrite = ((FileDisplayData) data).getCreateNewFile(); - - File dir = new File(pathName); - File file = new File(fileName); - - if (!dir.exists()) { - try { - dir.mkdirs(); - } catch (Exception e) { - throw new PresenterException( - "PST0005", - ErrorLevel.ERROR, - CommonErrMsg.PresentDriverErr, - "Cannot mkdir for path: " + dir.getAbsolutePath(), - e); - } - } - - if (overWrite || !file.exists()) { - try { - file.createNewFile(); - } catch (Exception e) { - throw new PresenterException( - "PST0006", - 
ErrorLevel.ERROR, - CommonErrMsg.PresentDriverErr, - "Cannot create file for path: " + file.getAbsolutePath(), - e); - } - } - - FileOutputStream fos = null; - OutputStreamWriter osWritter = null; - BufferedWriter bufferedWriter = null; - try { - fos = new FileOutputStream(file, !overWrite); - osWritter = new OutputStreamWriter(fos, "UTF-8"); - bufferedWriter = new BufferedWriter(osWritter, 1024); - bufferedWriter.write(content + "\n"); - } catch (Exception e) { - throw new PresenterException( - "PST0007", - ErrorLevel.ERROR, - CommonErrMsg.PresentDriverErr, - "Cannot write: " + file.getAbsolutePath(), - e); - - } finally { - if (bufferedWriter != null) { - try { - bufferedWriter.close(); - } catch (Exception e) { - // ignore - } - } - if (osWritter != null) { - try { - osWritter.close(); - } catch (Exception e) { - // ignore - } - } - if (fos != null) { - try { - fos.close(); - } catch (Exception e) { - // ignore - } - } - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/StdOutWriter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/StdOutWriter.java deleted file mode 100644 index bc91340dbf4..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/StdOutWriter.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present.display; - -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.display.data.DisplayData; -import org.apache.linkis.cli.core.present.display.data.StdoutDisplayData; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.slf4j.Logger; - -public class StdOutWriter implements DisplayOperator { - @Override - public void doOutput(DisplayData data) { - if (!(data instanceof StdoutDisplayData)) { - throw new PresenterException( - "PST0008", - ErrorLevel.ERROR, - CommonErrMsg.PresentDriverErr, - "input data is not instance of StdoutDisplayData"); - } - String content = ((StdoutDisplayData) data).getContent(); - Logger logger = LogUtils.getPlaintTextLogger(); - logger.info(content); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/DisplayData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/DisplayData.java deleted file mode 100644 index 38463b72698..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/DisplayData.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * 
contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present.display.data; - -public interface DisplayData {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/FileDisplayData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/FileDisplayData.java deleted file mode 100644 index 607e05fd862..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/FileDisplayData.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present.display.data; - -public class FileDisplayData extends StdoutDisplayData implements DisplayData { - private String pathName; - private String fileName; - private Boolean createNewFile; - - public FileDisplayData(String pathName, String fileName, String content, Boolean createNewFile) { - super(content); - this.pathName = pathName; - this.fileName = fileName; - this.createNewFile = createNewFile; - } - - public String getPathName() { - return pathName; - } - - public String getContent() { - return super.getContent(); - } - - public boolean getCreateNewFile() { - return createNewFile; - } - - public String getFileName() { - return fileName; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/StdoutDisplayData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/StdoutDisplayData.java deleted file mode 100644 index e3322564171..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/StdoutDisplayData.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present.display.data; - -public class StdoutDisplayData implements DisplayData { - private String content; - - public StdoutDisplayData(String content) { - this.content = content; - } - - public String getContent() { - return content; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/CommonUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/CommonUtils.java deleted file mode 100644 index 8d1a3b8ac96..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/CommonUtils.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.utils; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.Map; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; - -public class CommonUtils { - - public static final Gson GSON = - new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create(); - - public static T castStringToAny(Class clazz, String val) { - if (StringUtils.isBlank(val)) { - return null; - } - T ret = null; - if (clazz == Object.class) { - ret = clazz.cast(val); - } else if (clazz == String.class) { - ret = clazz.cast(val); - } else if (clazz == Integer.class) { - ret = clazz.cast(Integer.parseInt(val)); - } else if (clazz == Double.class) { - ret = clazz.cast(Double.parseDouble(val)); - } else if (clazz == Float.class) { - ret = clazz.cast(Float.parseFloat(val)); - } else if (clazz == Long.class) { - ret = clazz.cast(Long.parseLong(val)); - } else if (clazz == Boolean.class) { - ret = clazz.cast(Boolean.parseBoolean(val)); - } - return ret; - } - - public static void doSleepQuietly(Long sleepMills) { - try { - Thread.sleep(sleepMills); - } catch (Exception ignore) { - // ignored - } - } - - public static Map parseKVStringToMap(String kvStr, String separator) { - if (StringUtils.isBlank(separator)) { - separator = ","; - } - if (StringUtils.isBlank(kvStr)) { - return null; - } - Map argsProps = new HashMap<>(); - String[] args = StringUtils.splitByWholeSeparator(kvStr, separator); - for (String arg : args) { - int index = arg.indexOf("="); - if (index != -1) { - 
argsProps.put(arg.substring(0, index).trim(), arg.substring(index + 1).trim()); - } - } - - return argsProps; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/LogUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/LogUtils.java deleted file mode 100644 index b8147a4a603..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/LogUtils.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.utils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LogUtils { - private static final Logger PlainTextLogger = LoggerFactory.getLogger("PlaintTextLogger"); - private static final Logger InformationLogger = LoggerFactory.getLogger("InformationLogger"); - - public static Logger getPlaintTextLogger() { - return PlainTextLogger; - } - - public static Logger getInformationLogger() { - return InformationLogger; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/SchedulerUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/SchedulerUtils.java deleted file mode 100644 index 5cfce486283..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/SchedulerUtils.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.utils; - -import java.util.concurrent.*; -import java.util.concurrent.atomic.AtomicInteger; - -public class SchedulerUtils { - private static ExecutorService fixedThreadPool; - private static ThreadPoolExecutor cachedThreadPool; - private static int THREAD_NUM = 5; - private static String THREAD_NAME = "LinkisCli-Scheduler"; - private static Boolean IS_DEAMON = false; - - public static ThreadFactory threadFactory(String threadName, Boolean isDaemon) { - return new ThreadFactory() { - AtomicInteger num = new AtomicInteger(0); - - @Override - public Thread newThread(Runnable r) { - Thread t = new Thread(r); - t.setDaemon(isDaemon); - t.setName(threadName + num.incrementAndGet()); - return t; - } - }; - } - - public static ThreadPoolExecutor newCachedThreadPool( - int threadNum, String threadName, Boolean isDaemon) { - ThreadPoolExecutor threadPool = - new ThreadPoolExecutor( - threadNum, - threadNum, - 120L, - TimeUnit.SECONDS, - new LinkedBlockingQueue(10 * threadNum), - threadFactory(threadName, isDaemon)); - threadPool.allowCoreThreadTimeOut(true); - return threadPool; - } - - public static ExecutorService newFixedThreadPool( - int threadNum, String threadName, Boolean isDaemon) { - return Executors.newFixedThreadPool(threadNum, threadFactory(threadName, isDaemon)); - } - - public static ThreadPoolExecutor getCachedThreadPoolExecutor() { - if (cachedThreadPool == null) { - synchronized (SchedulerUtils.class) { - if (cachedThreadPool == null) { - cachedThreadPool = newCachedThreadPool(THREAD_NUM, THREAD_NAME, IS_DEAMON); - } - } - } - return cachedThreadPool; - } - - public static ExecutorService getFixedThreadPool() { - if (fixedThreadPool == null) { - synchronized (SchedulerUtils.class) { - if (fixedThreadPool == null) { - fixedThreadPool = newFixedThreadPool(THREAD_NUM, THREAD_NAME, IS_DEAMON); - } - } - } - return fixedThreadPool; - } - - public static void shutDown() { - if (fixedThreadPool != null) { - 
fixedThreadPool.shutdownNow(); - } - if (cachedThreadPool != null) { - cachedThreadPool.shutdownNow(); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/CommonConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/CommonConstantsTest.java deleted file mode 100644 index a1fb04f01de..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/CommonConstantsTest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.constants; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class CommonConstantsTest { - - @Test - @DisplayName("constTest") - public void constTest() { - - Long jobQuerySleepMills = CommonConstants.JOB_QUERY_SLEEP_MILLS; - Integer requestMaxRetryTime = CommonConstants.REQUEST_MAX_RETRY_TIME; - String universalSubcmd = CommonConstants.UNIVERSAL_SUBCMD; - String universalSubcmdDesc = CommonConstants.UNIVERSAL_SUBCMD_DESC; - String successIndicator = CommonConstants.SUCCESS_INDICATOR; - String failureIndicator = CommonConstants.FAILURE_INDICATOR; - String arraySeq = CommonConstants.ARRAY_SEQ; - String arraySeqRegex = CommonConstants.ARRAY_SEQ_REGEX; - int maxNumOfCommandArguements = CommonConstants.MAX_NUM_OF_COMMAND_ARGUEMENTS; - String configDir = CommonConstants.CONFIG_DIR; - String[] configExtension = CommonConstants.CONFIG_EXTENSION; - String systemPropertiesIdentifier = CommonConstants.SYSTEM_PROPERTIES_IDENTIFIER; - String systemEnvIdentifier = CommonConstants.SYSTEM_ENV_IDENTIFIER; - - Assertions.assertTrue(2000l == jobQuerySleepMills); - Assertions.assertTrue(3 == requestMaxRetryTime); - Assertions.assertEquals("linkis-cli", universalSubcmd); - Assertions.assertEquals( - "command for all types of jobs supported by Linkis", universalSubcmdDesc); - - Assertions.assertEquals("############Execute Success!!!########", successIndicator); - Assertions.assertEquals("############Execute Error!!!########", failureIndicator); - Assertions.assertEquals("@#@", arraySeq); - Assertions.assertEquals("(?=([^\"]*\"[^\"]*\")*[^\"]*$)", arraySeqRegex); - Assertions.assertTrue(10 == maxNumOfCommandArguements); - Assertions.assertEquals("config.path", configDir); - Assertions.assertTrue(configExtension.length == 1); - Assertions.assertEquals("SYS_PROP", systemPropertiesIdentifier); - Assertions.assertEquals("SYS_ENV", systemEnvIdentifier); - } -} diff 
--git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/TestConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/TestConstants.java deleted file mode 100644 index 307369054bf..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/TestConstants.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.constants; - -public class TestConstants { - - public static final String PRIMARY = "test.primary"; - public static final String PRIMARY_DESC = " command"; - - // for command name - public static final String SPARK = "spark"; - public static final String HIVE = "hive"; - - // for command description - public static final String SPARK_DESC = "Execute sql with spark"; - public static final String HIVE_DESC = "Execute hive sql"; - public static final String JOB_DESC = ""; - - // Common command params - public static final String PARAM_COMMON_FILE = "param.common.file"; - public static final String PARAM_COMMON_CMD = "param.common.command"; - public static final String PARAM_COMMON_OUTPUT = "param.common.output"; - public static final String PARAM_COMMON_ARGS = "param.common.args"; - public static final String PARAM_COMMON_SPLIT = "param.common.split"; - public static final String PARAM_COMMON_OTHER_KV = - "param.common.other.kv"; // for customize some parameters for some commands. Has to be - // in the for of kv-pairs separated by ',' - public static final String PARAM_YARN_QUEUE = "param.yarn.queue"; - - // for job command - public static final String PARAM_JOB_TYPE = "param.job.type"; - public static final String PARAM_JOB_ID = "param.job.id"; - public static final String PARAM_FORCE_KILL = "param.force.kill"; - public static final String LINKIS_JOBID_PREFIX = "lks_"; - - public static final String PARAM_MAPPER_KV_STRING = - "param.mapper.kv.string"; // Rules for ParamKeyMapper. format: - // key1=value1,key2=value2... 
- public static final String YARN_QUEUE = "wds.linkis.yarnqueue"; - public static final String YARN_QUEUE_DEFAULT = "default"; - public static final String YARN_QUEUE_CORE_MAX = "wds.linkis.yarnqueue.cores.max"; - public static final int YARN_QUEUE_CORE_MAX_DEFAULT = 150; - public static final String YARN_QUEUE_MEM_MAX = "wds.linkis.yarnqueue.memory.max"; - public static final String YARN_QUEUE_MEM_MAX_DEFAULT = "300G"; - public static final String PREHEATING_TIME = "wds.linkis.preheating.time"; - public static final String PREHEATING_TIME_DEFAULT = "9:00"; - public static final String TMPFILE_CLEAN_TIME = "wds.linkis.tmpfile.clean.time"; - public static final String TMPFILE_CLEAN_TIME_DEFAULT = "10:00"; - public static final String LINKIS_INSTANCE = "wds.linkis.instance"; - public static final int LINKIS_INSTANCE_DEFAULT = 10; - public static final String LINKIS_CLIENT_MEMORY_MAX = "wds.linkis.client.memory.max"; - public static final String LINKIS_CLIENT_MEMORY_MAX_DEFAULT = "20G"; - - // Common - public static final String LINKIS_NULL_VALUE = ""; - - public static final String SPARK_CMD = "spark"; - - public static final String PARAM_SPARK_NAME = "param.spark.name"; - public static final String PARAM_SPARK_HIVECONF = "param.spark.hiveconf"; - public static final String PARAM_SPARK_NUM_EXECUTORS = "param.spark.num.executors"; - public static final String PARAM_SPARK_EXECUTOR_CORES = "param.spark.executor.cores"; - public static final String PARAM_SPARK_EXECUTOR_MEMORY = "param.spark.executor.memory"; - public static final String PARAM_SPARK_SHUFFLE_PARTITIONS = "param.spark.shuffle.partitions"; - public static final String PARAM_SPARK_RUNTYPE = "param.spark.runtype"; - - public static final String LINKIS_SPARK_NAME = "appName"; - public static final String LINKIS_SPARK_NUM_EXECUTORS = "spark.executor.instances"; - public static final String LINKIS_SPARK_EXECUTOR_CORES = "spark.executor.cores"; - public static final String LINKIS_SPARK_EXECUTOR_MEMORY = 
"spark.executor.memory"; - public static final String LINKIS_SPARK_SHUFFLE_PARTITIONS = "spark.sql.shuffle.partitions"; - - public static final String PARAM_DB = "param.primary.database"; - public static final String PARAM_PROXY = "param.primary.proxy"; - public static final String PARAM_USER = "param.primary.user"; - public static final String PARAM_USR_CONF = "param.primary.user.conf"; - public static final String PARAM_PASSWORD = "param.primary.password"; - public static final String PARAM_SYNC_KEY = "param.primary.synckey"; - public static final String PARAM_PROXY_USER = "param.primary.proxyUser"; - public static final String PARAM_HELP = "param.help"; - public static final String PARAM_REAL_NAME = "param.primary.realName"; - public static final String PARAM_PIN_TOKEN = "param.primary.pinToken"; - - public static final String PARAM_PROPERTIES = "params.properties"; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/error/CommonErrMsgTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/error/CommonErrMsgTest.java deleted file mode 100644 index cdd51f4aeae..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/error/CommonErrMsgTest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.exception.error; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class CommonErrMsgTest { - - @Test - @DisplayName("enumTest") - public void enumTest() { - - String templateGenerr = CommonErrMsg.TemplateGenErr.getMsgTemplate(); - String fitErrMsgTemplate = CommonErrMsg.TemplateFitErr.getMsgTemplate(); - String parserInitErrMsgTemplate = CommonErrMsg.ParserInitErr.getMsgTemplate(); - String parseErrMsgTemplate = CommonErrMsg.ParserParseErr.getMsgTemplate(); - - Assertions.assertEquals("Cannot generate template. :{0}", templateGenerr); - Assertions.assertEquals("Cannot fit input into template: {0}", fitErrMsgTemplate); - - Assertions.assertEquals("Failed to init parser: {0}", parserInitErrMsgTemplate); - Assertions.assertEquals("Failed to parse. 
{0}", parseErrMsgTemplate); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandlerTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandlerTest.java deleted file mode 100644 index ee23f0216ba..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandlerTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.exception.handler; - -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.handler.ExceptionHandler; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.TestCmdType; - -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; - -public class CommandExceptionHandlerTest { - ExceptionHandler handler = new CommandExceptionHandler(); - - // todo - @Disabled - @Test - public void handle() throws Exception { - CommandException cmdException = - new CommandException( - "CODE-001", - ErrorLevel.ERROR, - CommonErrMsg.TemplateGenErr, - "Failed to generate template."); - assertDoesNotThrow(() -> handler.handle(cmdException)); - - String[] params = {"param1", "param2"}; - CommandException cmdException2 = - new CommandException( - "CODE-001", - ErrorLevel.ERROR, - CommonErrMsg.TemplateGenErr, - params, - "Failed to generate template."); - assertDoesNotThrow(() -> handler.handle(cmdException2)); - - CommandException cmdException3 = - new CommandException( - "CODE-001", - ErrorLevel.ERROR, - CommonErrMsg.TemplateGenErr, - TestCmdType.PRIMARY, - "Failed to generate template."); - assertDoesNotThrow(() -> handler.handle(cmdException3)); - - CommandException cmdException4 = - new CommandException( - "CODE-001", - ErrorLevel.ERROR, - CommonErrMsg.TemplateGenErr, - TestCmdType.PRIMARY, - params, - "Failed to generate template."); - assertDoesNotThrow(() -> handler.handle(cmdException4)); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/TestCmdType.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/TestCmdType.java deleted file mode 100644 index a6b05eef2bf..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/TestCmdType.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.command; - -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.core.constants.TestConstants; - -public enum TestCmdType implements CmdType { - PRIMARY(TestConstants.PRIMARY, 0, TestConstants.PRIMARY_DESC), - SPARK(TestConstants.SPARK, 1, TestConstants.SPARK_DESC); - // TEST(TestConstants.TEST_COMMAND, 0, TestConstants.TEST_DESC); - - private int id; - private String name; - private String desc; - - TestCmdType(String name, int id) { - this.id = id; - this.name = name; - this.desc = null; - } - - TestCmdType(String name, int id, String desc) { - this.id = id; - this.name = name; - this.desc = desc; - } - - @Override - public int getId() { - return this.id; - } - - @Override - public String getName() { - return this.name; - } - - @Override - public String getDesc() { - return this.desc; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtilsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtilsTest.java deleted file mode 100644 index 5c51a582fa9..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtilsTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.command.fitter; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class FitterUtilsTest { - - @Test - @DisplayName("isOptionTest") - public void isOptionTest() { - - String name = "-hadoop"; - boolean option = FitterUtils.isOption(name); - Assertions.assertTrue(option); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitterTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitterTest.java deleted file mode 100644 index ef542910dd7..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitterTest.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.command.fitter; - -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.core.interactor.command.template.TestSparkCmdTemplate; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.*; - -public class SingleTplFitterTest { - Fitter fitter; - CmdTemplate template; - String[] cmdStr, cmdStr2; - - @BeforeEach - public void before() throws Exception { - cmdStr = - new String[] { - "-u", - "hadoop", - "-pwd", - "1234", - "-c", - "/path/to/user/config", - "--cmd", - "show tables", - "--split", - "\',\'", - "--queue", - "q05", - "--name", - "testApp", - // "--hiveconf", "/path/...", - "--num-executors", - "4", - "--executor-cores", - "4", - "--executor-memory", - "4G", - "--shuffle-partitions", - "200", - "--other", - "--other-spark-config=none", - }; - cmdStr2 = - new String[] { - "-u", - "hadoop", - "-pwd", - "1234", - "-c", - "/path/to/user/config", - "--cmd", - "show tables", - "--split", - "\',\'", - "--queue", - "q05", - "--name", - "testApp", - // "--hiveconf", "/path/...", - "--num-executors", - "4", - "--executor-cores", - "4", - "--executor-memory", - "4G", - "--shuffle-partitions", - "200", - "--other", - "--other-spark-config=none", - "-P", - "key1=value1, key2=value2, key5=\"key3=value3,key4=value4\" " - }; - template = new TestSparkCmdTemplate(); - fitter = new SingleTplFitter(); - } - - @AfterEach - public void after() throws Exception 
{} - - /** Method: fit(TemplateFitterInput[] inputs) */ - @Test - public void testParseAndFit() throws Exception { - FitterResult[] results = new FitterResult[2]; - results[0] = fitter.fit(cmdStr, template); - results[1] = fitter.fit(cmdStr2, new TestSparkCmdTemplate()); - - assertTrue(results[0].getParsedTemplate() instanceof TestSparkCmdTemplate); - assertEquals( - results[0].getParsedTemplate().getOptionsMap().get("--cmd").getValue(), "show tables"); - assertNotEquals(results[0].getParsedTemplate(), template.getCopy()); - assertNotEquals(results[0].getParsedTemplate().getOptions(), template.getCopy().getOptions()); - assertNotEquals( - results[0].getParsedTemplate().getOptions().get(1), template.getCopy().getOptions().get(1)); - assertEquals( - results[0].getParsedTemplate().getOptions().get(1).getValue(), - template.getCopy().getOptions().get(1).getValue()); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParserTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParserTest.java deleted file mode 100644 index 61b8c72eab2..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParserTest.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.command.parser; - -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.ParamItem; -import org.apache.linkis.cli.core.interactor.command.TestCmdType; -import org.apache.linkis.cli.core.interactor.command.fitter.SingleTplFitter; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; -import org.apache.linkis.cli.core.interactor.command.template.TestParamMapper; -import org.apache.linkis.cli.core.interactor.command.template.TestSparkCmdTemplate; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.*; - -public class SingleCmdParserTest { - String[] cmdStr; - Map templateMap; - - @BeforeEach - public void before() throws Exception { - cmdStr = - new String[] { - "-u", - "hadoop", - "-pwd", - "1234", - "-c", - "/path/to/user/config", - "--cmd", - "show tables", - "--split", - "\',\'", - "--queue", - "q05", - "--name", - "testApp", - // "--hiveconf", "/path/...", - "--num-executors", - "4", - "--executor-cores", - "4", - "--executor-memory", - "4G", - "--shuffle-partitions", - "200", - "--other", - "--other-spark-config=none", - }; - - TestSparkCmdTemplate template = new TestSparkCmdTemplate(); - templateMap = new HashMap<>(); - 
templateMap.put(template.getCmdType().getName(), template); - } - - @AfterEach - public void after() throws Exception {} - - /** Method: parse(String[] input) */ - @Test - public void testParse() { - - Parser parser = - new SingleCmdParser() - .setMapper(null) - .setTemplate(templateMap.get(TestCmdType.SPARK.getName())) - .setFitter(new SingleTplFitter()) - .setMapper(new TestParamMapper()); - - ParseResult result = parser.parse(cmdStr); - - assertEquals(result.getParams().getCmdType(), TestCmdType.SPARK); - - Map params = result.getParams().getParamItemMap(); - - assertEquals(params.size(), 21); - - List sortedKeys = params.keySet().stream().sorted().collect(Collectors.toList()); - assertEquals( - sortedKeys, - Arrays.asList( - "converted.args", - "converted.split", - "param.common.command", - "param.common.file", - "param.common.other.kv", - "param.help", - "param.primary.database", - "param.primary.password", - "param.primary.proxy", - "param.primary.proxyUser", - "param.primary.synckey", - "param.primary.user", - "param.primary.user.conf", - "param.spark.executor.cores", - "param.spark.executor.memory", - "param.spark.hiveconf", - "param.spark.name", - "param.spark.num.executors", - "param.spark.runtype", - "param.spark.shuffle.partitions", - "param.yarn.queue")); - assertEquals(result.getRemains().length, 0); - } - - /** Method: parsePrimary(String[] input) */ - @Test - public void testParsePrimary() throws Exception { - // TODO: Test goes here... - /* - try { - Method method = SingleCmdParser.getClass().getMethod("parsePrimary", String[].class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } - - /** Method: parseSingleSub(String[] remains) */ - @Test - public void testParseSingleSub() throws Exception { - // TODO: Test goes here... 
- /* - try { - Method method = SingleCmdParser.getClass().getMethod("parseSingleSub", String[].class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } - - /** Method: standardParse(String identifier, String[] args, CmdTemplate templateOri) */ - @Test - public void testStandardParse() throws Exception { - // TODO: Test goes here... - /* - try { - Method method = SingleCmdParser.getClass().getMethod("standardParse", String.class, String[].class, CmdTemplate.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestParamMapper.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestParamMapper.java deleted file mode 100644 index 25f44d56c7c..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestParamMapper.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.command.template; - -import org.apache.linkis.cli.core.constants.TestConstants; -import org.apache.linkis.cli.core.interactor.command.parser.transformer.ParamKeyMapper; - -public class TestParamMapper extends ParamKeyMapper { - - @Override - public void initMapperRules() { - super.updateMapping(TestConstants.SPARK, "converted.cmd"); - super.updateMapping(TestConstants.PARAM_COMMON_ARGS, "converted.args"); - super.updateMapping(TestConstants.PARAM_COMMON_SPLIT, "converted.split"); - // super.updateMapping("key1", "spark.cmd"); //should throw exception - // super.updateMapping("TestConstants.PARAM_SPARK_CMD", "spark.cmd"); - // super.updateMapping("TestConstants.PARAM_SPARK_CMD", "spark.cmd"); - - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestSparkCmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestSparkCmdTemplate.java deleted file mode 100644 index 4795f11c9c2..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestSparkCmdTemplate.java +++ /dev/null @@ -1,268 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.command.template; - -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.core.constants.TestConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.interactor.command.TestCmdType; -import org.apache.linkis.cli.core.interactor.command.template.option.StdOption; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class TestSparkCmdTemplate extends AbstractCmdTemplate { - protected StdOption databaseOp = - option( - null, - TestConstants.PARAM_DB, - new String[] {"-d", "--database"}, - "specify database", - true, - ""); - protected StdOption proxyOp = - option( - null, - TestConstants.PARAM_PROXY, - new String[] {"-x", "--proxy"}, - "specify proxy url", - true, - ""); - protected StdOption userOp = - option( - null, TestConstants.PARAM_USER, new String[] {"-u", "--user"}, "specify user", true, ""); - protected StdOption confOp = - option( - null, - TestConstants.PARAM_USR_CONF, - new String[] {"-c", "--conf"}, - "specify configuration from property file", - true, - ""); - private Logger logger = 
LoggerFactory.getLogger(TestSparkCmdTemplate.class); - private StdOption passwordOp = - option( - null, - TestConstants.PARAM_PASSWORD, - new String[] {"-pwd", "--passwd"}, - "specify user password", - true, - ""); - private StdOption syncOp = - option( - null, - TestConstants.PARAM_SYNC_KEY, - new String[] {"-sk", "--synckey"}, - "specify sync key", - true, - ""); - private StdOption proxyUserOp = - option( - null, - TestConstants.PARAM_PROXY_USER, - new String[] {"-pu", "--proxy-user"}, - "specify proxy user", - true, - ""); - - private StdOption helpOp = - option(null, TestConstants.PARAM_HELP, new String[] {"-h", "--help"}, "help info", true, ""); - - private StdOption filePara = - option( - null, - TestConstants.PARAM_COMMON_FILE, - new String[] {"--file", "-f"}, - "Spark SQL File to Execute!", - true, - ""); - - private StdOption commandPara = - option( - null, - TestConstants.PARAM_COMMON_CMD, - new String[] {"--cmd"}, - "Spark SQL Command to Execute!", - true, - ""); - - private StdOption argsPara = - option( - null, - TestConstants.PARAM_COMMON_ARGS, - new String[] {"--args", "-a"}, - "Set command args, k-v pairs delimited by comma, e.g. key1=value1,key2=value2,...", - true, - ""); - - private StdOption splitPara = - option( - null, - TestConstants.PARAM_COMMON_SPLIT, - new String[] {"--split", "-s"}, - "specify the split character string", - true, - ","); - - private StdOption queuePara = - option( - null, - TestConstants.PARAM_YARN_QUEUE, - new String[] {"--queue", "-q"}, - "specify the queue", - true, - "default"); - - private StdOption namePara = - option( - null, - TestConstants.PARAM_SPARK_NAME, - new String[] {"--name", "-n"}, - "specify the application name. WARNING:this option is deprecated. Linkis does not support this variable", - true, - ""); - - private StdOption> hiveconfPara = - option( - null, - TestConstants.PARAM_SPARK_HIVECONF, - new String[] {"--hiveconf", "-hc"}, - "specify the hiveconf setting,e.g. 
hive.cli.print.header=false", - true, - new HashMap<>()); - - private StdOption nePara = - option( - null, - TestConstants.PARAM_SPARK_NUM_EXECUTORS, - new String[] {"--num-executors", "-ne"}, - "specify the spark application container", - true, - 3); - - private StdOption ecPara = - option( - null, - TestConstants.PARAM_SPARK_EXECUTOR_CORES, - new String[] {"--executor-cores", "-ec"}, - "specify the spark application container vcores(less than queue's max vcores)", - true, - 2); - - private StdOption emPara = - option( - null, - TestConstants.PARAM_SPARK_EXECUTOR_MEMORY, - new String[] {"--executor-memory", "-em"}, - "specify the spark application executor's memory, 1.5G-2G/vcore", - true, - "4G"); - - private StdOption spPara = - option( - null, - TestConstants.PARAM_SPARK_SHUFFLE_PARTITIONS, - new String[] {"--shuffle-partitions", "-sp"}, - "specify the spark.sql.shuffle.partitions", - true, - 200); - - private StdOption otherPara = - option( - null, - TestConstants.PARAM_COMMON_OTHER_KV, - new String[] {"--other"}, - "specify the other parameters", - true, - ""); - - private StdOption runTypePara = - option( - null, - TestConstants.PARAM_SPARK_RUNTYPE, - new String[] {"--runtype"}, - "specify the runtype parameters: sql pyspark scala", - true, - "sql"); - - public TestSparkCmdTemplate() { - super(TestCmdType.SPARK); - } - - @Override - public void checkParams() throws CommandException {} - - @Override - protected Object clone() throws CloneNotSupportedException { - return super.clone(); - } - - @Override - public TestSparkCmdTemplate getCopy() { - return (TestSparkCmdTemplate) super.getCopy(); - } - - public void prepare() throws LinkisClientRuntimeException {} - - private Map parseOtherMap(String otherStr) { - Map otherMap = new HashMap<>(); - /** - * split by space-chara, but if space char is within \"\", then do not split. e.g. 
"a b c" will - * not be splited - */ - otherStr = otherStr.replace("\'", "\""); - String[] arr = otherStr.trim().split("\\s(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1); - for (int i = 0; i < arr.length; i++) { - String candidate = arr[i]; - - if (StringUtils.equalsIgnoreCase(candidate, "--conf") && i < arr.length - 1) { - // '--conf' and has next - i++; // move to next - candidate = arr[i]; - int idx = StringUtils.indexOf(candidate, "="); - if (idx != -1) { - String key = StringUtils.substring(candidate, 0, idx).trim(); - String value = StringUtils.substring(candidate, idx + 1).trim(); - otherMap.put(key, value); - } - } else { - // without '--conf' - int idx = StringUtils.indexOf(candidate, " "); - if (idx != -1) { - // e.g. '--driver-memory 2G' - String key = StringUtils.substring(candidate, 0, idx).trim(); - String value = StringUtils.substring(candidate, idx + 1).trim(); - otherMap.put(key, value); - } else if (i < arr.length - 1) { - // e.g. --driver-memory 2G - String key = candidate.trim(); - i++; // move to next - candidate = arr[i]; - String value = candidate.trim(); - otherMap.put(key, value); - } - } - } - return otherMap; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoaderTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoaderTest.java deleted file mode 100644 index 9bef89b3f35..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoaderTest.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.properties; - -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; - -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -public class StdPropsLoaderTest { - PropertiesLoader loader; - - @BeforeEach - public void before() throws Exception { - System.setProperty("conf.root", "src/test/resources/conf/"); - System.setProperty("conf.file", "linkis-cli.properties"); - String configPath = System.getProperty("conf.root"); - String defaultConfFileName = System.getProperty("conf.file"); - /* - default config, -Dconf.root & -Dconf.file specifies config path - */ - List readersList = - new PropsFilesScanner().getPropsReaders(configPath); // +1 user config - /* - user defined config - */ - // load all config files - loader = - new StdPropsLoader() - .addPropertiesReaders(readersList.toArray(new PropertiesReader[readersList.size()])); - } - - @AfterEach - public void after() throws Exception {} - - /** Method: setPropertiesReaders(PropertiesReader[] readers) */ - @Test - public void testSetPropertiesReaders() throws Exception { - 
// TODO: Test goes here... - loader.setPropertiesReaders(new PropertiesReader[0]); - } - - /** Method: getAllReadersAsMap() */ - @Test - public void testGetAllReadersAsMap() throws Exception { - // TODO: Test goes here... - } - - /** Method: addPropertiesReader(PropertiesReader reader) */ - @Test - public void testAddPropertiesReader() throws Exception { - // TODO: Test goes here... - } - - /** Method: addPropertiesReaders(PropertiesReader[] readers) */ - @Test - public void testAddPropertiesReaders() throws Exception { - // TODO: Test goes here... - } - - /** Method: getPropertiesReader(String identifier) */ - @Test - public void testGetPropertiesReader() throws Exception { - // TODO: Test goes here... - } - - /** Method: removePropertiesReader(String identifier) */ - @Test - public void testRemovePropertiesReader() throws Exception { - // TODO: Test goes here... - } - - /** Method: loadProperties() */ - @Test - public void testLoadProperties() throws Exception { - ClientProperties[] loaderResult = loader.loadProperties(); - - List properties = - Arrays.stream(loaderResult) - .sorted((p1, p2) -> p1.size() - p2.size()) - .collect(Collectors.toList()); - - assertEquals(2, properties.size()); - assertEquals(properties.get(0).getPropsId(), "user.properties"); - assertEquals(properties.get(0).size(), 4); - - assertEquals(properties.get(1).getPropsId(), "linkis-cli.properties"); - assertEquals(properties.get(1).size(), 8); - } - - /** Method: checkInit() */ - @Test - public void testCheckInit() throws Exception { - // TODO: Test goes here... 
- } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/var/StdVarAccessTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/var/StdVarAccessTest.java deleted file mode 100644 index 497a539dd50..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/var/StdVarAccessTest.java +++ /dev/null @@ -1,298 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.var; - -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.core.constants.TestConstants; -import org.apache.linkis.cli.core.interactor.command.TestCmdType; -import org.apache.linkis.cli.core.interactor.command.fitter.SingleTplFitter; -import org.apache.linkis.cli.core.interactor.command.parser.Parser; -import org.apache.linkis.cli.core.interactor.command.parser.SingleCmdParser; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; -import org.apache.linkis.cli.core.interactor.command.template.TestSparkCmdTemplate; -import org.apache.linkis.cli.core.interactor.properties.PropertiesLoader; -import org.apache.linkis.cli.core.interactor.properties.PropsFilesScanner; -import org.apache.linkis.cli.core.interactor.properties.StdPropsLoader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropsFileReader; -import org.apache.linkis.cli.core.interactor.validate.ParsedTplValidator; - -import org.apache.commons.lang3.StringUtils; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -public class StdVarAccessTest { - String[] cmdStr; - VarAccess stdVarAccess; - - @BeforeEach - public void before() throws Exception { - cmdStr = - new String[] { - "-u", - "hadoop", - "-pwd", - "1234", - "-c", - "src/test/resources/conf/user.properties", - "--cmd", - "show tables", - "--split", - "\',\'", - "--queue", - "q05", - "--name", - "testApp", - // 
"--hiveconf", "/path/...", - "--num-executors", - "4", - "--executor-cores", - "4", - "--executor-memory", - "4G", - "--shuffle-partitions", - "200", - "--other", - "--other-spark-config=none", - }; - - TestSparkCmdTemplate template = new TestSparkCmdTemplate(); - Map templateMap = new HashMap<>(); - templateMap.put(template.getCmdType().getName(), template); - - Parser parser = - new SingleCmdParser() - .setMapper(null) - .setTemplate(templateMap.get(TestCmdType.SPARK.getName())) - .setFitter(new SingleTplFitter()); - - ParseResult result = parser.parse(cmdStr); - - ParsedTplValidator parsedTplValidator = new ParsedTplValidator(); - parsedTplValidator.doValidation(result.getParsedTemplate()); - - System.setProperty("conf.root", "src/test/resources/conf/"); - System.setProperty("conf.file", "linkis-cli.properties"); - String configPath = System.getProperty("conf.root"); - String defaultConfFileName = System.getProperty("conf.file"); - /* - default config, -Dconf.root & -Dconf.file specifies config path - */ - List readersList = - new PropsFilesScanner().getPropsReaders(configPath); // +1 user config - /* - user defined config - */ - String userConfPath = - (String) result.getParams().getParamItemMap().get(TestConstants.PARAM_USR_CONF).getValue(); - if (StringUtils.isNotBlank(userConfPath)) { - PropertiesReader reader = new PropsFileReader(); - reader.setPropsId("user.conf"); - reader.setPropsPath(userConfPath); - readersList.add(reader); - } else { - } - // load all config files - PropertiesLoader loader = - new StdPropsLoader() - .addPropertiesReaders(readersList.toArray(new PropertiesReader[readersList.size()])); - ClientProperties[] loaderResult = loader.loadProperties(); - Map propertiesMap = new HashMap<>(); - for (ClientProperties properties : loaderResult) { - propertiesMap.put(properties.getPropsId(), properties); - } - - stdVarAccess = - new StdVarAccess() - .setCmdParams(result.getParams()) - .setUserConf(propertiesMap.get("user.conf")) - 
.setDefaultConf(propertiesMap.get(defaultConfFileName)) - .init(); - } - - @AfterEach - public void after() throws Exception {} - - /** Method: setPrimaryParam(Params primaryParam) */ - @Test - public void testSetPrimaryParam() throws Exception { - // TODO: Test goes here... - } - - /** Method: getPrimaryParam(String identifier) */ - @Test - public void testGetPrimaryParam() throws Exception { - // TODO: Test goes here... - } - - /** Method: setCmdParams(Params subParam) */ - @Test - public void testSetSubParam() throws Exception { - // TODO: Test goes here... - } - - /** Method: getSubParam(String identifier) */ - @Test - public void testGetSubParam() throws Exception { - // TODO: Test goes here... - } - - /** Method: setUserConf(ClientProperties userConf) */ - @Test - public void testSetUserConf() throws Exception { - // TODO: Test goes here... - } - - /** Method: getUserConf(String identifier) */ - @Test - public void testGetUserConf() throws Exception { - // TODO: Test goes here... - } - - /** Method: setDefaultConf(ClientProperties defaultConf) */ - @Test - public void testSetDefaultConf() throws Exception { - // TODO: Test goes here... - } - - /** Method: getDefaultConf(String identifier) */ - @Test - public void testGetDefaultConf() throws Exception { - // TODO: Test goes here... - } - - /** Method: checkInit() */ - @Test - public void testCheckInit() throws Exception { - // TODO: Test goes here... 
- } - - /** Method: getVar(Class clazz, String key) */ - @Test - public void testGetVar() throws Exception { - System.out.println(stdVarAccess.getVar(String.class, TestConstants.PARAM_COMMON_CMD)); - assertEquals(stdVarAccess.getVar(String.class, TestConstants.PARAM_COMMON_CMD), "show tables"); - System.out.println(stdVarAccess.getVar(String.class, "user.props")); - assertEquals(stdVarAccess.getVar(String.class, "wds.linkis.client.not.exist"), null); - System.out.println( - stdVarAccess.getVar( - Integer.class, TestConstants.PARAM_SPARK_EXECUTOR_CORES)); // see if priority works - assertEquals( - (long) stdVarAccess.getVar(Integer.class, TestConstants.PARAM_SPARK_EXECUTOR_CORES), 4); - assertEquals((long) stdVarAccess.getVar(Integer.class, "conf.prop.integer"), 9); - assertEquals(stdVarAccess.getVar(String.class, "conf.prop.string"), "str"); - assertEquals( - stdVarAccess.getVar(String.class, "wds.linkis.client.param.conf.spark.executor.memory"), - "11111G"); - - System.out.println(stdVarAccess.getAllVarKeys().length); - System.out.println(Arrays.toString(stdVarAccess.getAllVarKeys())); - assertTrue(stdVarAccess.getAllVarKeys().length != 0); - } - - /** Method: getVarOrDefault(Class clazz, String key, T defaultValue) */ - @Test - public void testGetVarOrDefault() throws Exception { - // TODO: Test goes here... - } - - /** Method: getAllVarKeys() */ - @Test - public void testGetAllVarKeys() throws Exception { - // TODO: Test goes here... - } - - /** Method: getVarFromParam(Class clazz, String key, Params param) */ - @Test - public void testGetVarFromParam() throws Exception { - // TODO: Test goes here... 
- /* - try { - Method method = StdVarAccess.getClass().getMethod("getVarFromParam", Class.class, String.class, Params.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } - - /** Method: getDefaultVarFromParam(Class clazz, String key, Params param) */ - @Test - public void testGetDefaultVarFromParam() throws Exception { - // TODO: Test goes here... - /* - try { - Method method = StdVarAccess.getClass().getMethod("getDefaultVarFromParam", Class.class, String.class, Params.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } - - /** Method: getVarFromCfg(Class clazz, String key, ClientProperties conf) */ - @Test - public void testGetVarFromCfg() throws Exception { - // TODO: Test goes here... - /* - try { - Method method = StdVarAccess.getClass().getMethod("getVarFromCfg", Class.class, String.class, ClientProperties.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } - - /** - * Method: getVarFromCfgGivenConverter(String key, ClientProperties conf, - * AbstractStringConverter converter) - */ - @Test - public void testGetVarFromCfgGivenConverter() throws Exception { - // TODO: Test goes here... 
- /* - try { - Method method = StdVarAccess.getClass().getMethod("getVarFromCfgGivenConverter", String.class, ClientProperties.class, AbstractStringConverter.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/present/PresentModeImplTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/present/PresentModeImplTest.java deleted file mode 100644 index 8f776bef81b..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/present/PresentModeImplTest.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.present; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class PresentModeImplTest { - - @Test - @DisplayName("enumTest") - public void enumTest() { - - String stdoutName = PresentModeImpl.STDOUT.getName(); - String textFileName = PresentModeImpl.TEXT_FILE.getName(); - - Assertions.assertEquals("STDOUT", stdoutName); - Assertions.assertEquals("TEXT_FILE", textFileName); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/utils/TestUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/utils/TestUtils.java deleted file mode 100644 index 61cdd6aa7fc..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/utils/TestUtils.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.utils; - -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.StringUtils; - -import java.io.File; -import java.util.HashMap; -import java.util.Map; - -public class TestUtils { - private static String replaceParas(String context, Map m) { - if (context == null) { - return null; - } - for (Map.Entry entry : m.entrySet()) { - if (entry.getKey() == null || entry.getValue() == null) { - continue; - } - String key = "[#" + entry.getKey().trim() + "]"; - String value = entry.getValue().trim(); - - context = StringUtils.replace(context, key, value); - } - return context; - } - - public static Map parseArgMap(String str, String separator) { - - Map argsProps = new HashMap<>(); - String[] args = StringUtils.splitByWholeSeparator(str, separator); - - for (String arg : args) { - int index = arg.indexOf("="); - if (index != -1) { - argsProps.put(arg.substring(0, index).trim(), arg.substring(index + 1).trim()); - } - } - return argsProps; - } - - public static String readShellFileAndReplaceParas(String filename, String argsStr, String split) - throws Exception { - - String fileContent; - - File inputFile = new File(filename); - - fileContent = FileUtils.readFileToString(inputFile); - - Map argsMap = parseArgMap(argsStr, split); - - fileContent = replaceParas(fileContent, argsMap); - - return fileContent; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/linkis-cli.properties b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/linkis-cli.properties deleted file mode 100644 index e0f85cf3e5d..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/linkis-cli.properties +++ /dev/null @@ -1,34 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -wds.linkis.client.common.creator=IDE3 -wds.linkis.client.common.gatewayUrl=http://127.0.0.1:8088 -#wds.linkis.client.common.authStrategy -#wds.linki.clients.common.tokenKey -#wds.linkis.client.common.tokenValue -# -#wds.linkis.client.common.submitUser -#wds.linkis.client.common.submitPassword -#wds.linkis.client.common.proxyUser -wds.linkis.client.param.conf.wds.linkis.yarnqueue=q02 -wds.linkis.client.param.conf.yarnqueue.cores.max=233 -wds.linkis.client.param.conf.yarnqueue.memory.max=233G -wds.linkis.client.param.conf.spark.executor.instances=9 -wds.linkis.client.param.conf.spark.executor.cores=9 -wds.linkis.client.param.conf.spark.executor.memory=9 -#wds.linkis.client.label="key1=val1,key2=val2" -#wds.linkis.client.param.conf="key1=val1,key2=val2" -#wds.linkis.client.param.var="key1=val1,key2=val2" -#wds.linkis.client.source="key1=val1,key2=val2" \ No newline at end of file diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/user.properties b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/user.properties deleted file mode 100644 index faa650fc5c8..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/user.properties +++ /dev/null 
@@ -1,19 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -user.props=xxxxxxxxxxxxx -conf.prop.integer=9 -conf.prop.string=str -wds.linkis.client.param.conf.spark.executor.memory=11111G \ No newline at end of file diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/log4j2.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/log4j2.xml deleted file mode 100644 index c7446788769..00000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/log4j2.xml +++ /dev/null @@ -1,50 +0,0 @@ - - - - - - ${sys:log.path} - ${sys:log.file} - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/linkis-computation-governance/linkis-client/linkis-cli/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/pom.xml index e9efc432d5a..76723c4adb0 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/pom.xml +++ b/linkis-computation-governance/linkis-client/linkis-cli/pom.xml @@ -25,15 +25,24 @@ ../../../pom.xml linkis-cli - pom - - - linkis-cli-common - linkis-cli-core - linkis-cli-application - + jar + + org.apache.linkis + 
linkis-computation-client + ${project.version} + + + org.apache.linkis + linkis-gateway-httpclient-support + ${project.version} + + + org.reflections + reflections + ${reflections.version} + org.apache.linkis linkis-common @@ -42,17 +51,53 @@ org.apache.commons commons-lang3 + ${commons-lang3.version} com.google.code.gson gson ${gson.version} - ${project.artifactId}-${project.version} + + + true + ${basedir}/src/main/resources + + + + + org.apache.maven.plugins + maven-assembly-plugin + false + + false + out + false + false + + /src/main/assembly/distribution.xml + + + + + make-assembly + + single + + package + + + /src/main/assembly/distribution.xml + + + + + + diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-client/linkis-cli/src/main/assembly/distribution.xml new file mode 100644 index 00000000000..24463e39afa --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/assembly/distribution.xml @@ -0,0 +1,123 @@ + + + + + linkis-cli + + dir + + false + + + + lib + true + true + false + false + true + runtime + + org.apache.hadoop:*:jar + org.apache.spark:*:jar + org.apache.zookeeper:*:jar + org.apache.avro:*:jar + com.google.code.findbugs:annotations:jar + commons-beanutils:commons-beanutils:jar + commons-codec:commons-codec:jar + commons-collections:commons-collections:jar + commons-io:commons-io:jar + org.checkerframework:checker-qual:jar + commons-lang:commons-lang:jar + org.apache.commons:commons-lang3:jar + commons-net:commons-net:jar + com.google.errorprone:error_prone_annotations:jar + com.google.guava:failureaccess:jar + com.google.code.gson:gson:jar + com.google.guava:guava:jar + commons-httpclient:commons-httpclient:jar + commons-net:commons-net:jar + com.google.errorprone:error_prone_annotations:jar + org.apache.httpcomponents:httpcore:jar + com.google.j2objc:j2objc-annotations:jar + com.fasterxml.jackson.core:jackson-annotations:jar + 
com.fasterxml.jackson.core:jackson-core:jar + com.fasterxml.jackson.core:jackson-databind:jar + com.fasterxml.jackson.module:jackson-module-parameter-names:jar + com.fasterxml.jackson.module:jackson-module-scala_2.11:jar + jakarta.annotation:jakarta.annotation-api:jar + org.javassist:javassist:jar + org.json4s:json4s-ast_2.11:jar + org.json4s:json4s-core_2.11:jar + org.json4s:json4s-scalap_2.11:jar + org.json4s:json4s-jackson_2.11:jar + org.apache.logging.log4j:log4j-api:jar + org.apache.logging.log4j:log4j-core:jar + org.apache.logging.log4j:log4j-jul:jar + org.apache.logging.log4j:log4j-slf4j-impl:jar + com.thoughtworks.paranamer:paranamer:jar + org.reflections:reflections:jar + org.scala-lang:scala-compiler:jar + org.scala-lang:scala-library:jar + org.scala-lang:scala-reflect:jar + org.scala-lang:scalap:jar + org.slf4j:slf4j-api:jar + + + + + + + ${basedir} + + + README* + LICENSE* + NOTICE* + + + + + + ${basedir}/src/main/resources/conf/ + + conf + + **/* + + 0777 + unix + + + + + ${basedir}/src/main/resources/bin/ + + bin + + **/* + + 0777 + unix + + + + + + \ No newline at end of file diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/CtxBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/CtxBuilder.java new file mode 100644 index 00000000000..cfa57d4e266 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/CtxBuilder.java @@ -0,0 +1,185 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.PropsException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.CliCmdType; +import org.apache.linkis.cli.application.interactor.command.CmdTemplateFactory; +import org.apache.linkis.cli.application.interactor.command.fitter.SingleTplFitter; +import org.apache.linkis.cli.application.interactor.command.parser.Parser; +import org.apache.linkis.cli.application.interactor.command.parser.SingleCmdParser; +import org.apache.linkis.cli.application.interactor.command.parser.result.ParseResult; +import org.apache.linkis.cli.application.interactor.context.CliCtxImpl; +import org.apache.linkis.cli.application.interactor.properties.ClientProperties; +import org.apache.linkis.cli.application.interactor.properties.PropertiesLoader; +import org.apache.linkis.cli.application.interactor.properties.PropsFilesScanner; +import 
org.apache.linkis.cli.application.interactor.properties.reader.PropertiesReader; +import org.apache.linkis.cli.application.interactor.properties.reader.PropsFileReader; +import org.apache.linkis.cli.application.interactor.validate.ParsedTplValidator; +import org.apache.linkis.cli.application.interactor.var.VarAccessImpl; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.StringUtils; + +import java.io.*; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class CtxBuilder { + private static Logger logger = LoggerFactory.getLogger(CtxBuilder.class); + + /** generate Templates load env variables TODO: load version info */ + public static CliCtx buildCtx(String[] args) throws LinkisClientRuntimeException { + /* + user input + */ + CmdTemplate template = CmdTemplateFactory.getTemplateCopy(CliCmdType.UNIVERSAL); + Parser parser = + new SingleCmdParser() + .setMapper(null) + .setTemplate(template) + .setFitter(new SingleTplFitter()); + + ParseResult result = parser.parse(args); + + ParsedTplValidator parsedTplValidator = new ParsedTplValidator(); + + parsedTplValidator.doValidation(result.getParsedTemplate()); + + Params params = result.getParams(); + logger.debug("==========params============\n" + CliUtils.GSON.toJson(params)); + + /* + VarAccess for sys_prop, sys_env + */ + + Map propertiesMap = new HashMap<>(); + + LoggerManager.getInformationLogger() + .info( + "LogFile path: " + + System.getProperty(CliKeys.LOG_PATH_KEY) + + "/" + + System.getProperty(CliKeys.LOG_FILE_KEY)); + /* + default config, -Dconf.root & -Dconf.file specifies config path + */ + // scan config files given root path + String configPath = System.getProperty(CliKeys.CLIENT_CONFIG_ROOT_KEY); + String defaultConfFileName = + 
System.getProperty(CliKeys.DEFAULT_CONFIG_FILE_NAME_KEY, CliConstants.DEFAULT_CONFIG_NAME); + + if (StringUtils.isBlank(configPath)) { + throw new PropsException( + "PRP0007", + ErrorLevel.ERROR, + CommonErrMsg.PropsLoaderErr, + "configuration root path specified by env variable: " + + CliKeys.CLIENT_CONFIG_ROOT_KEY + + " is empty."); + } + + List readersList = + new PropsFilesScanner().getPropsReaders(configPath); // +1 user config + /* + user defined config + */ + String userConfPath = null; + if (params.containsParam(CliKeys.LINKIS_CLIENT_USER_CONFIG)) { + userConfPath = + (String) params.getParamItemMap().get(CliKeys.LINKIS_CLIENT_USER_CONFIG).getValue(); + } + if (StringUtils.isNotBlank(userConfPath)) { + PropertiesReader reader = + new PropsFileReader() + .setPropsId(CliKeys.LINKIS_CLIENT_USER_CONFIG) + .setPropsPath(userConfPath); + readersList.add(reader); + } else { + LoggerManager.getInformationLogger() + .info("User does not provide usr-configuration file. Will use default config"); + } + /* + load properties + */ + PropertiesLoader loader = + new PropertiesLoader() + .addPropertiesReaders(readersList.toArray(new PropertiesReader[readersList.size()])); + ClientProperties[] loaderResult = loader.loadProperties(); + for (ClientProperties properties : loaderResult) { + if (StringUtils.equals(properties.getPropsId(), CliKeys.LINKIS_CLIENT_USER_CONFIG)) { + for (Map.Entry prop : properties.entrySet()) { + if (StringUtils.startsWith( + (String) prop.getKey(), CliKeys.LINKIS_CLIENT_NONCUSTOMIZABLE)) { + throw new PropsException( + "PRP0007", + ErrorLevel.ERROR, + CommonErrMsg.PropsLoaderErr, + "User cannot specify non-customizable configuration: " + prop.getKey()); + } + } + } + propertiesMap.put(properties.getPropsId(), properties); + } + + /* + VarAccess for cmd, config + */ + VarAccess varAccess = + new VarAccessImpl() + .setCmdParams(params) + .setUserConf(propertiesMap.get(CliKeys.LINKIS_CLIENT_USER_CONFIG)) + 
.setDefaultConf(propertiesMap.get(defaultConfFileName)) + .init(); + logger.info("==========std_var============\n" + CliUtils.GSON.toJson(varAccess)); + + Properties props = new Properties(); + try (InputStream inputStream = + CtxBuilder.class.getClassLoader().getResourceAsStream("version.properties")) { + try (InputStreamReader reader = new InputStreamReader(inputStream)) { + try (BufferedReader bufferedReader = new BufferedReader(reader)) { + props.load(bufferedReader); + } + } + } catch (Exception e) { + logger.warn("Failed to load version info", e); + } + + String verion = props.getProperty(CliKeys.VERSION); + + Map extraMap = new HashMap<>(); + extraMap.put(CliKeys.VERSION, verion); + + return new CliCtxImpl(params.getCmdType(), template, varAccess, extraMap); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java new file mode 100644 index 00000000000..24ee3c8dccb --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java @@ -0,0 +1,193 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.interactor.command.CmdTemplateFactory; +import org.apache.linkis.cli.application.interactor.command.template.UniversalCmdTemplate; +import org.apache.linkis.cli.application.interactor.job.help.HelpJob; +import org.apache.linkis.cli.application.interactor.job.interactive.InteractiveJob; +import org.apache.linkis.cli.application.interactor.job.jobcmd.JobCmdJob; +import org.apache.linkis.cli.application.interactor.job.once.LinkisOnceJob; +import org.apache.linkis.cli.application.interactor.job.version.VersionJob; +import org.apache.linkis.cli.application.operator.OperManager; +import org.apache.linkis.cli.application.operator.once.OnceOperBuilder; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperBuilder; +import org.apache.linkis.cli.application.present.HelpPresenter; +import org.apache.linkis.cli.application.present.model.HelpInfoModel; +import org.apache.linkis.cli.application.utils.LoggerManager; +import org.apache.linkis.cli.application.utils.SchedulerManager; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; + +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LinkisClientApplication { + private static Logger logger = LoggerFactory.getLogger(LinkisClientApplication.class); 
+ + private static boolean showHelp = false; + + public static void main(String[] args) { + /* + generate template + */ + CmdTemplateFactory.register(new UniversalCmdTemplate()); + + /* + build ctx + */ + CliCtx ctx = null; + try { + ctx = CtxBuilder.buildCtx(args); + } catch (CommandException e) { + CmdTemplate template = CmdTemplateFactory.getTemplateOri(e.getCmdType()); + if (template != null) { + HelpInfoModel model = new HelpInfoModel(); + model.buildModel(template); + new HelpPresenter().present(model); + } + LoggerManager.getInformationLogger().error("Failed to build CliCtx", e); + System.exit(-1); + } + + /* + prepare oper + */ + OperManager.register(CliKeys.Linkis_OPER, new LinkisOperBuilder()); + OperManager.register(CliKeys.LINKIS_ONCE, new OnceOperBuilder()); + + /* + run job + */ + Job job; + if (isVersionCmd(ctx)) { + job = new VersionJob(); + } else if (isHelp(ctx)) { + job = new HelpJob(); + } else if (isJobCmd(ctx)) { + job = new JobCmdJob(); + } else if (isOnceCmd(ctx)) { + job = new LinkisOnceJob(); + } else { + job = new InteractiveJob(); + } + job.build(ctx); + JobResult result; + try { + Runtime.getRuntime() + .addShutdownHook( + new Thread( + () -> { + if (job != null) { + job.onDestroy(); + } + })); + result = job.run(); + } catch (Exception e) { + logger.error("Failed to execute job", e); + result = + new JobResult() { + @Override + public Boolean isSuccess() { + return false; + } + + @Override + public String getMessage() { + return "Failed to execute job" + ExceptionUtils.getStackTrace(e); + } + + @Override + public Map getExtraMessage() { + return new HashMap<>(); + } + }; + } + + /* + process result + */ + printIndicator(result); + + SchedulerManager.shutDown(); + + if (result.isSuccess()) { + System.exit(0); + } else { + System.exit(-1); + } + } + + private static void printIndicator(JobResult jobResult) { + if (jobResult.isSuccess()) { + LoggerManager.getPlaintTextLogger().info(CliConstants.SUCCESS_INDICATOR); + } else { + 
LoggerManager.getPlaintTextLogger().info(jobResult.getMessage()); + StringBuilder b = new StringBuilder(); + for (Map.Entry e : jobResult.getExtraMessage().entrySet()) { + b.append(e.getKey()).append(":").append(e.getValue()).append(System.lineSeparator()); + } + LoggerManager.getPlaintTextLogger().info(b.toString()); + LoggerManager.getPlaintTextLogger().info(CliConstants.FAILURE_INDICATOR); + } + } + + private static Boolean isHelp(CliCtx ctx) { + if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_HELP_OPT)) { + return true; + } + return false; + } + + private static Boolean isVersionCmd(CliCtx ctx) { + if (ctx.getVarAccess().hasVar(CliKeys.VERSION)) { + return true; + } + return false; + } + + private static Boolean isJobCmd(CliCtx ctx) { + if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_KILL_OPT) + || ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_STATUS_OPT) + || ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_DESC_OPT) + || ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_LOG_OPT) + || ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_RESULT_OPT) + || ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_LIST_OPT)) { + return true; + } + return false; + } + + private static Boolean isOnceCmd(CliCtx ctx) { + String mode = + ctx.getVarAccess() + .getVarOrDefault(String.class, CliKeys.LINKIS_CLIENT_MODE_OPT, CliConstants.UJES_MODE); + return StringUtils.equalsIgnoreCase(mode, CliConstants.ONCE_MODE); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliConstants.java new file mode 100644 index 00000000000..d277f435fe3 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliConstants.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or 
more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.constants; + +public class CliConstants { + + public static final String DEFAULT_CONFIG_NAME = "linkis-cli.properties"; + public static final Long JOB_QUERY_SLEEP_MILLS = 2000l; + public static final String RESULTSET_LOGO = "============ RESULT SET {0} ============"; + + public static final String RESULTSET_META_BEGIN_LOGO = "----------- META DATA ------------"; + public static final String RESULTSET_META_END_LOGO = "------------ END OF META DATA ------------"; + + public static final String RESULTSET_SEPARATOR_LOGO = "------------------------"; + + public static final Integer RESULTSET_PAGE_SIZE = 5000; + + public static final String JOB_CREATOR_DEFAULT = "LINKISCLI"; + + public static final String JOB_CREATOR_ASYNC_DEFAULT = "LINKISCLIASYNC"; + + public static final String DUMMY_CID = "dummy"; + + public static final String LINKIS_CLI = "LinkisCli"; + + public static final String UJES_MODE = "ujes"; + + public static final String ONCE_MODE = "once"; + + public static final Integer REQUEST_MAX_RETRY_TIME = 3; + + public static final String UNIVERSAL_SUBCMD = "linkis-cli"; + + public static final String UNIVERSAL_SUBCMD_DESC = + "command for all types of jobs supported by Linkis"; + + public 
static final String SUCCESS_INDICATOR = "############Execute Success!!!########"; + + public static final String FAILURE_INDICATOR = "############Execute Error!!!########"; + + public static final String ARRAY_SEQ = "@#@"; + + public static final String ARRAY_SEQ_REGEX = "(?=([^\"]*\"[^\"]*\")*[^\"]*$)"; + + public static final int MAX_NUM_OF_COMMAND_ARGUEMENTS = 10; + + public static final String CONFIG_DIR = "config.path"; + + public static final String[] CONFIG_EXTENSION = {"properties"}; + + public static final String SYSTEM_PROPERTIES_IDENTIFIER = "SYS_PROP"; + + public static final String SYSTEM_ENV_IDENTIFIER = "SYS_ENV"; +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliKeys.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliKeys.java new file mode 100644 index 00000000000..966836e0bf5 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliKeys.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.constants; + +public class CliKeys { + + /** User Not configurable */ + public static final String ADMIN_USERS = "hadoop,root,shangda"; + + public static final String LINKIS_CLIENT_NONCUSTOMIZABLE = "wds.linkis.client.noncustomizable"; + public static final String LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_USER_SPECIFICATION = + LINKIS_CLIENT_NONCUSTOMIZABLE + + ".enable.user.specification"; // allow user to specify submit user + public static final String LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_PROXY_USER = + LINKIS_CLIENT_NONCUSTOMIZABLE + ".enable.proxy.user"; // allow user to specify proxy user + + /** In env */ + public static final String LOG_PATH_KEY = "log.path"; + + public static final String LOG_FILE_KEY = "log.file"; + + public static final String CLIENT_CONFIG_ROOT_KEY = "conf.root"; + public static final String DEFAULT_CONFIG_FILE_NAME_KEY = "conf.file"; + public static final String LINUX_USER_KEY = "user.name"; + + public static final String VERSION = "cli.version"; + + /** Configurable */ + /* + execution type + */ + public static final String JOB_EXEC = "wds.linkis.client.exec"; + + public static final String JOB_EXEC_CODE = JOB_EXEC + ".code"; + + /* + jobContent type + */ + public static final String JOB_CONTENT = "wds.linkis.client.jobContent"; + + /* + source + */ + public static final String JOB_SOURCE = "wds.linkis.client.source"; + public static final String JOB_SOURCE_SCRIPT_PATH = + JOB_SOURCE + "." + LinkisKeys.KEY_SCRIPT_PATH; // corresponds to server api. + + /* + params + */ + public static final String JOB_PARAM_CONF = "wds.linkis.client.param.conf"; + public static final String JOB_PARAM_RUNTIME = "wds.linkis.client.param.runtime"; + public static final String JOB_PARAM_VAR = "wds.linkis.client.param.var"; + + /* + labels + */ + public static final String JOB_LABEL = "wds.linkis.client.label"; + public static final String JOB_LABEL_ENGINE_TYPE = + JOB_LABEL + "." 
+ LinkisKeys.KEY_ENGINETYPE; // corresponds to server api. + public static final String JOB_LABEL_CODE_TYPE = + JOB_LABEL + "." + LinkisKeys.KEY_CODETYPE; // corresponds to server api. + public static final String JOB_LABEL_EXECUTEONCE = + JOB_LABEL + "." + LinkisKeys.KEY_EXECUTEONCE; // corresponds to server api. + public static final String JOB_LABEL_CLUSTER = + JOB_LABEL + "." + LinkisKeys.KEY_CLUSTER; // corresponds to server api. + + /* + Job command + */ + public static final String LINKIS_CLIENT_JOB = "wds.linkis.client.job"; + public static final String LINKIS_CLIENT_JOB_TYPE = "wds.linkis.client.job.type"; + public static final String LINKIS_CLIENT_JOB_ID = "wds.linkis.client.job.id"; + + /* + common + */ + public static final String LINKIS_CLIENT_COMMON = "wds.linkis.client.common"; + public static final String LINKIS_CLIENT_KILL_OPT = LINKIS_CLIENT_COMMON + ".kill"; + public static final String LINKIS_CLIENT_STATUS_OPT = LINKIS_CLIENT_COMMON + ".status"; + public static final String LINKIS_CLIENT_ASYNC_OPT = LINKIS_CLIENT_COMMON + ".async.submit"; + public static final String LINKIS_CLIENT_HELP_OPT = LINKIS_CLIENT_COMMON + ".help"; + public static final String LINKIS_CLIENT_DESC_OPT = LINKIS_CLIENT_COMMON + ".desc"; + public static final String LINKIS_CLIENT_LOG_OPT = LINKIS_CLIENT_COMMON + ".log"; + public static final String LINKIS_CLIENT_RESULT_OPT = LINKIS_CLIENT_COMMON + ".result"; + public static final String LINKIS_CLIENT_LIST_OPT = LINKIS_CLIENT_COMMON + ".list"; + public static final String LINKIS_CLIENT_MODE_OPT = LINKIS_CLIENT_COMMON + ".mode"; + public static final String LINKIS_CLIENT_USER_CONFIG = LINKIS_CLIENT_COMMON + ".user.conf"; + public static final String LINKIS_CLIENT_DEFAULT_CONFIG = LINKIS_CLIENT_COMMON + ".default.conf"; + public static final String LINKIS_COMMON_GATEWAY_URL = LINKIS_CLIENT_COMMON + ".gatewayUrl"; + public static final String LINKIS_COMMON_DIAPLAY_META_LOGO = + LINKIS_CLIENT_COMMON + ".display.meta.log"; + public 
static final String LINKIS_COMMON_LOG_FROMLINE = LINKIS_CLIENT_COMMON + ".fromline"; + public static final String LINKIS_COMMON_RESULT_FROMPAGE = LINKIS_CLIENT_COMMON + ".frompage"; + public static final String LINKIS_COMMON_RESULT_FROMIDX = LINKIS_CLIENT_COMMON + ".fromidx"; + public static final String LINKIS_COMMON_RESULTPATHS = LINKIS_CLIENT_COMMON + ".resultpaths"; + public static final String JOB_EXTRA_ARGUMENTS = + LINKIS_CLIENT_COMMON + "." + LinkisKeys.EXTRA_ARGUMENTS; + public static final String JOB_COMMON_CODE_PATH = LINKIS_CLIENT_COMMON + ".code.path"; + + // all static token , default static + public static final String LINKIS_CLIENT_COMMON_OUTPUT_PATH = + LINKIS_CLIENT_COMMON + ".output.path"; + + public static final String LINKIS_COMMON_AUTHENTICATION_STRATEGY = + LINKIS_CLIENT_COMMON + ".authStrategy"; + public static final String LINKIS_COMMON_TOKEN_KEY = LINKIS_CLIENT_COMMON + ".tokenKey"; + public static final String LINKIS_COMMON_TOKEN_VALUE = LINKIS_CLIENT_COMMON + ".tokenValue"; + + public static final String JOB_COMMON_SUBMIT_USER = LINKIS_CLIENT_COMMON + ".submitUser"; + public static final String JOB_COMMON_SUBMIT_PASSWORD = LINKIS_CLIENT_COMMON + ".submitPassword"; + public static final String JOB_COMMON_PROXY_USER = LINKIS_CLIENT_COMMON + ".proxyUser"; + public static final String JOB_COMMON_CREATOR = LINKIS_CLIENT_COMMON + ".creator"; + + public static final String UJESCLIENT_COMMON_CONNECTT_TIMEOUT = + LINKIS_CLIENT_COMMON + ".connectionTimeout"; + public static final String UJESCLIENT_COMMON_DISCOVERY_ENABLED = + LINKIS_CLIENT_COMMON + ".discoveryEnabled"; + public static final String UJESCLIENT_COMMON_LOADBALANCER_ENABLED = + LINKIS_CLIENT_COMMON + ".loadbalancerEnabled"; + public static final String UJESCLIENT_COMMON_MAX_CONNECTION_SIZE = + LINKIS_CLIENT_COMMON + ".maxConnectionSize"; + public static final String UJESCLIENT_COMMON_RETRY_ENABLED = + LINKIS_CLIENT_COMMON + ".retryEnabled"; + public static final String 
UJESCLIENT_COMMON_READTIMEOUT = LINKIS_CLIENT_COMMON + ".readTimeout"; + public static final String UJESCLIENT_COMMON_DWS_VERSION = LINKIS_CLIENT_COMMON + ".dwsVersion"; + + public static final String LINKIS_CLIENT_COMMON_RESULT_SET_PAGE_SIZE = + LINKIS_CLIENT_COMMON + ".resultset.page.size"; + + public static final String Linkis_OPER = "linkis.oper"; + public static final String LINKIS_ONCE = "linkis.once"; +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/LinkisConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/LinkisConstants.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/LinkisConstants.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/LinkisConstants.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java similarity index 95% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java index 9488c25f4a4..170f1a8f862 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java @@ -34,6 +34,7 @@ public class LinkisKeys { public static final String KEY_EXECID = "execId"; public static final String KEY_UMUSER = "umUser"; public static final String KEY_EXECUTEONCE = "executeOnce"; + public static final String KEY_CLUSTER = "yarnCluster"; public static final String KEY_TENANT = "tenant"; public static final String META_DATA_COLUMN_NAME = "columnName"; public static final String KEY_SHELL_WORKING_DIRECTORY = @@ -46,4 +47,5 @@ public class LinkisKeys { public static final String KEY_YARN_QUEUE = "wds.linkis.rm.yarnqueue"; public static final String KEY_HIVE_RESULT_DISPLAY_TBALE = "hive.resultset.use.unique.column.names"; + public static final String CLI_VERSION = "cli.version"; } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdOption.java similarity index 95% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdOption.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdOption.java index f9b6924be59..1583090c88c 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdOption.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdOption.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.command; +package org.apache.linkis.cli.application.entity.command; public interface CmdOption extends Cloneable { String getParamName(); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdTemplate.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdTemplate.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdTemplate.java index da802bc5ff8..94677774a8f 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdTemplate.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdTemplate.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.command; +package org.apache.linkis.cli.application.entity.command; import java.util.List; import java.util.Map; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdType.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdType.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdType.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdType.java index 6819fdb92b7..2f9dd803e80 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdType.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdType.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.command; +package org.apache.linkis.cli.application.entity.command; public interface CmdType extends Cloneable { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/ParamItem.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/ParamItem.java similarity index 96% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/ParamItem.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/ParamItem.java index 02a33cfc95d..c8a28d9009a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/ParamItem.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/ParamItem.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.command; +package org.apache.linkis.cli.application.entity.command; public class ParamItem { private String keyPrefix; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/Params.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/Params.java similarity index 97% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/Params.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/Params.java index 9fa73c01fc8..558521ad72f 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/Params.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/Params.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.cli.common.entity.command; +package org.apache.linkis.cli.application.entity.command; import java.util.Map; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/context/CliCtx.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/context/CliCtx.java new file mode 100644 index 00000000000..f18a402c2e9 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/context/CliCtx.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.entity.context; + +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.entity.var.VarAccess; + +import java.util.Map; + +public interface CliCtx { + /** + * accessing and passing VarAccess instance between jobs + * + * @return + */ + CmdType getCmdType(); + + CmdTemplate getTemplate(); + + VarAccess getVarAccess(); + + Map getExtraMap(); +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/Job.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/Job.java new file mode 100644 index 00000000000..303c530c2a3 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/Job.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.entity.job; + +import org.apache.linkis.cli.application.entity.context.CliCtx; + +public interface Job { + void build(CliCtx ctx); + + JobResult run(); + + void onDestroy(); +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobResult.java new file mode 100644 index 00000000000..eba7b7ba8a5 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobResult.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.entity.job; + +import java.util.Map; + +public interface JobResult { + Boolean isSuccess(); + + String getMessage(); + + Map getExtraMessage(); +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobStatus.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobStatus.java similarity index 95% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobStatus.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobStatus.java index d0b6f445b69..7f048dd88a7 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobStatus.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobStatus.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.cli.common.entity.job; +package org.apache.linkis.cli.application.entity.job; public interface JobStatus { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/operator/JobOper.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/operator/JobOper.java new file mode 100644 index 00000000000..ae1bf7c4659 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/operator/JobOper.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.entity.operator; + +public interface JobOper {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Model.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Model.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Model.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Model.java index b68fd986aa0..9fe2c106fc1 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Model.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Model.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.present; +package org.apache.linkis.cli.application.entity.present; public interface Model { void buildModel(Object data); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Presenter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Presenter.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Presenter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Presenter.java index dbe1adaefe8..7c27316dfbd 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Presenter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Presenter.java @@ -15,8 +15,8 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.present; +package org.apache.linkis.cli.application.entity.present; public interface Presenter { - void present(Model model, PresentWay presentWay); + void present(Model model); } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/var/VarAccess.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/var/VarAccess.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/var/VarAccess.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/var/VarAccess.java index 50df2236e13..8236bd81c9c 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/var/VarAccess.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/var/VarAccess.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.var; +package org.apache.linkis.cli.application.entity.var; /** * @program: linkis-cli @@ -24,8 +24,6 @@ */ public interface VarAccess { - void checkInit(); - T getVar(Class clazz, String key); T getVarOrDefault(Class clazz, String key, T defaultValue); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/BuilderException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/BuilderException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/BuilderException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/BuilderException.java index b4f35643aa1..b00c15f7e07 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/BuilderException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/BuilderException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class BuilderException extends LinkisClientRuntimeException { private static final long serialVersionUID = 5454234257L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/CommandException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/CommandException.java similarity index 85% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/CommandException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/CommandException.java index 6f6c5512d47..e0cff74b1ec 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/CommandException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/CommandException.java @@ -15,12 +15,11 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class CommandException extends LinkisClientRuntimeException { private static final long serialVersionUID = 745261661L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientException.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientException.java index 7ffd880b73a..9dcb69a4107 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientException.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.exception; +package org.apache.linkis.cli.application.exception; public abstract class LinkisClientException extends Exception { private static final long serialVersionUID = 42563456489L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/LinkisClientExecutionException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientExecutionException.java similarity index 84% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/LinkisClientExecutionException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientExecutionException.java index e97d068d4e6..3acfd71f998 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/LinkisClientExecutionException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientExecutionException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class LinkisClientExecutionException extends LinkisClientRuntimeException { private static final long serialVersionUID = 987189405659L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientRuntimeException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientRuntimeException.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientRuntimeException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientRuntimeException.java index 98538b05e4a..0d5f4f79762 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientRuntimeException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientRuntimeException.java @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; import java.text.MessageFormat; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PresenterException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PresenterException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PresenterException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PresenterException.java index dbfd57a2cc4..42aa1b1e171 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PresenterException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PresenterException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class PresenterException extends LinkisClientRuntimeException { private static final long serialVersionUID = 212314213L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PropsException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PropsException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PropsException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PropsException.java index e7e3799dccb..d39a9de3c0e 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PropsException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PropsException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class PropsException extends LinkisClientRuntimeException { private static final long serialVersionUID = 182747823415933L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/TransformerException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/TransformerException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/TransformerException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/TransformerException.java index efb578b971f..d5b35c6c8d5 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/TransformerException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/TransformerException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class TransformerException extends LinkisClientRuntimeException { private static final long serialVersionUID = 5454234257L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/UnknownException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/UnknownException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/UnknownException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/UnknownException.java index ff5cfac1391..a3fecf3537d 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/UnknownException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/UnknownException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class UnknownException extends LinkisClientRuntimeException { private static final long serialVersionUID = 974159L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/ValidateException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/ValidateException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/ValidateException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/ValidateException.java index 95896d9aa08..f796328dd5a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/ValidateException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/ValidateException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class ValidateException extends LinkisClientRuntimeException { private static final long serialVersionUID = 5454234257L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/VarAccessException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/VarAccessException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/VarAccessException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/VarAccessException.java index 34509947d4f..31dc848e367 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/VarAccessException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/VarAccessException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class VarAccessException extends LinkisClientRuntimeException { private static final long serialVersionUID = 125344127L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/error/CommonErrMsg.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/CommonErrMsg.java similarity index 96% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/error/CommonErrMsg.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/CommonErrMsg.java index 67eb0701c73..ea6247e0c4f 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/error/CommonErrMsg.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/CommonErrMsg.java @@ -15,9 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception.error; - -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +package org.apache.linkis.cli.application.exception.error; public enum CommonErrMsg implements ErrorMsg { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorLevel.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorLevel.java similarity index 96% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorLevel.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorLevel.java index ed2867c72dc..170cedf9a5d 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorLevel.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorLevel.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.exception.error; +package org.apache.linkis.cli.application.exception.error; public enum ErrorLevel { /** warn 1 error 2 fatal 3 */ diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorMsg.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorMsg.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorMsg.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorMsg.java index 3d092fe67dd..4b6c5d32448 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorMsg.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorMsg.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.cli.common.exception.error; +package org.apache.linkis.cli.application.exception.error; public interface ErrorMsg { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CliCmdType.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CliCmdType.java new file mode 100644 index 00000000000..f2a352c5420 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CliCmdType.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.command; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.entity.command.CmdType; + +public enum CliCmdType implements CmdType { + UNIVERSAL(CliConstants.UNIVERSAL_SUBCMD, 1, CliConstants.UNIVERSAL_SUBCMD_DESC); + + private int id; + private String name; + private String desc; + + CliCmdType(String name, int id) { + this.id = id; + this.name = name; + this.desc = null; + } + + CliCmdType(String name, int id, String desc) { + this.id = id; + this.name = name; + this.desc = desc; + } + + @Override + public int getId() { + return this.id; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public String getDesc() { + return this.desc; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/CmdTemplateFactory.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CmdTemplateFactory.java similarity index 83% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/CmdTemplateFactory.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CmdTemplateFactory.java index 91a9f796cbd..f3b901c1f87 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/CmdTemplateFactory.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CmdTemplateFactory.java @@ -15,13 +15,13 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.command; +package org.apache.linkis.cli.application.interactor.command; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/SpecialMap.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/SpecialMap.java similarity index 94% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/SpecialMap.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/SpecialMap.java index 86075e33284..427c35baef8 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/SpecialMap.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/SpecialMap.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.command; +package org.apache.linkis.cli.application.interactor.command; import java.util.HashMap; import java.util.Map; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/AbstractFitter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/AbstractFitter.java similarity index 89% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/AbstractFitter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/AbstractFitter.java index d32d05bbb6d..f5e1a99b587 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/AbstractFitter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/AbstractFitter.java @@ -15,18 +15,18 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.fitter; - -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.template.option.Flag; -import org.apache.linkis.cli.core.interactor.command.template.option.Parameter; +package org.apache.linkis.cli.application.interactor.command.fitter; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.template.option.Flag; +import org.apache.linkis.cli.application.interactor.command.template.option.Parameter; import org.apache.commons.lang3.StringUtils; @@ -225,7 +225,7 @@ private final int setParameterValue( Parameter param = (Parameter) cmdOption; if (param.accepctArrayValue()) { String[] args2 = Arrays.copyOfRange(args, argIdx, args.length); - param.setValueWithStr(StringUtils.join(args2, CommonConstants.ARRAY_SEQ)); + param.setValueWithStr(StringUtils.join(args2, CliConstants.ARRAY_SEQ)); return args.length; } else { 
parameters.get(paraIdx).setValueWithStr(args[argIdx]); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/Fitter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/Fitter.java similarity index 80% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/Fitter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/Fitter.java index 305ffcaef19..cfd0f0fcfad 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/Fitter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/Fitter.java @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.fitter; +package org.apache.linkis.cli.application.interactor.command.fitter; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; public interface Fitter { FitterResult fit(String[] input, CmdTemplate templateCopy) throws LinkisClientRuntimeException; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterResult.java similarity index 90% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterResult.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterResult.java index 5e9f9820b17..abcee0d9afe 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterResult.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterResult.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.fitter; +package org.apache.linkis.cli.application.interactor.command.fitter; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; public class FitterResult { String[] remains; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterUtils.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtils.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterUtils.java index 40d80d84b22..1b6b46cd107 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtils.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterUtils.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.fitter; +package org.apache.linkis.cli.application.interactor.command.fitter; public class FitterUtils { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/SingleTplFitter.java similarity index 79% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/SingleTplFitter.java index 8eed2ea89fe..06b7d7e4f83 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/SingleTplFitter.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.fitter; +package org.apache.linkis.cli.application.interactor.command.fitter; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import java.util.ArrayList; import java.util.List; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/AbstarctParser.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/AbstarctParser.java similarity index 85% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/AbstarctParser.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/AbstarctParser.java index 30c0e1f014b..16107b18695 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/AbstarctParser.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/AbstarctParser.java @@ -15,20 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.parser; - -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.ParamItem; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.SpecialMap; -import org.apache.linkis.cli.core.interactor.command.fitter.Fitter; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; -import org.apache.linkis.cli.core.interactor.command.parser.transformer.ParamKeyMapper; +package org.apache.linkis.cli.application.interactor.command.parser; + +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.ParamItem; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.SpecialMap; +import org.apache.linkis.cli.application.interactor.command.fitter.Fitter; +import org.apache.linkis.cli.application.interactor.command.parser.result.ParseResult; +import org.apache.linkis.cli.application.interactor.command.parser.transformer.ParamKeyMapper; import org.apache.commons.lang3.StringUtils; diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/Parser.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/Parser.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/Parser.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/Parser.java index 0f3c847740c..0e10977a63c 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/Parser.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/Parser.java @@ -15,10 +15,10 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.command.parser; +package org.apache.linkis.cli.application.interactor.command.parser; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.interactor.command.parser.result.ParseResult; /** * 1. parse cmd arguments and fill into template 2. generate unique identifier for sub command 3. 
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParser.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/SingleCmdParser.java similarity index 77% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParser.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/SingleCmdParser.java index aea3086fd52..4afb7a759f6 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParser.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/SingleCmdParser.java @@ -15,15 +15,15 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.parser; +package org.apache.linkis.cli.application.interactor.command.parser; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.fitter.FitterResult; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.fitter.FitterResult; +import org.apache.linkis.cli.application.interactor.command.parser.result.ParseResult; import java.util.Arrays; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/result/ParseResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/result/ParseResult.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/result/ParseResult.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/result/ParseResult.java index 2a528f3b863..75059c86c50 100644 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/result/ParseResult.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/result/ParseResult.java @@ -15,10 +15,10 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.command.parser.result; +package org.apache.linkis.cli.application.interactor.command.parser.result; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.Params; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.Params; public class ParseResult { CmdTemplate parsedTemplate; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/transformer/ParamKeyMapper.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/transformer/ParamKeyMapper.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/transformer/ParamKeyMapper.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/transformer/ParamKeyMapper.java index 3799906bd5c..c78e7c57c9b 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/transformer/ParamKeyMapper.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/transformer/ParamKeyMapper.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.parser.transformer; +package org.apache.linkis.cli.application.interactor.command.parser.transformer; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.utils.CommonUtils; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.utils.CliUtils; import org.apache.commons.lang3.StringUtils; @@ -47,8 +47,7 @@ public ParamKeyMapper() { } public ParamKeyMapper(Map mapperRules) { - mapperRules = new HashMap<>(); - initMapperRules(mapperRules); + initMapperRules(new HashMap<>()); } /** Executor should overwrite init() method to set key to key mapping */ @@ -79,7 +78,7 @@ public void updateMapping(String key, String targetKey) { /** update keyMapping according to kv-String. 
*/ private void updateMappingbyConfig(String kvString) { if (StringUtils.isNotBlank(kvString)) { - Map result = CommonUtils.parseKVStringToMap(kvString, ","); + Map result = CliUtils.parseKVStringToMap(kvString, ","); this.mapperRules.putAll(result); } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/AbstractCmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/AbstractCmdTemplate.java similarity index 91% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/AbstractCmdTemplate.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/AbstractCmdTemplate.java index 992b58effc4..ec1f318c2bc 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/AbstractCmdTemplate.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/AbstractCmdTemplate.java @@ -15,20 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template; - -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.SpecialMap; -import org.apache.linkis.cli.core.interactor.command.template.option.*; -import org.apache.linkis.cli.core.utils.converter.AbstractStringConverter; -import org.apache.linkis.cli.core.utils.converter.PredefinedStringConverters; +package org.apache.linkis.cli.application.interactor.command.template; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.SpecialMap; +import org.apache.linkis.cli.application.interactor.command.template.converter.AbstractStringConverter; +import org.apache.linkis.cli.application.interactor.command.template.converter.PredefinedStringConverters; +import org.apache.linkis.cli.application.interactor.command.template.option.*; import java.lang.reflect.Field; import java.util.*; @@ -291,10 +291,10 @@ protected final SpecialMapOption 
speciaMapOption( private void checkIllegalOption(final String[] names) { if (names == null || names.length <= 0) { throw new IllegalArgumentException("At least one cmdType should be given to CmdOption."); - } else if (names.length > CommonConstants.MAX_NUM_OF_COMMAND_ARGUEMENTS) { + } else if (names.length > CliConstants.MAX_NUM_OF_COMMAND_ARGUEMENTS) { throw new IllegalArgumentException( "At most " - + CommonConstants.MAX_NUM_OF_COMMAND_ARGUEMENTS + + CliConstants.MAX_NUM_OF_COMMAND_ARGUEMENTS + " cmdType can be given to CmdOption."); } else { for (String name : names) { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java new file mode 100644 index 00000000000..f66f5ab3860 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java @@ -0,0 +1,398 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.command.template; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.CliCmdType; +import org.apache.linkis.cli.application.interactor.command.template.option.*; + +import org.apache.commons.lang3.StringUtils; + +import java.io.File; +import java.util.Arrays; + +public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneable { + + protected StdOption gatewayUrl = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_COMMON_GATEWAY_URL, + new String[] {"--gatewayUrl"}, + "specify linkis gateway url", + true, + ""); + protected StdOption authenticatationStrategy = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, + new String[] {"--authStg"}, + "specify linkis authentication strategy", + true, + ""); + protected StdOption authKey = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_COMMON_TOKEN_KEY, + new String[] {"--authKey"}, + "specify linkis authentication key(tokenKey)", + true, + ""); + protected StdOption authValue = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_COMMON_TOKEN_VALUE, + new String[] {"--authVal"}, + "specify linkis authentication value(tokenValue)", + true, + ""); + protected StdOption userConfigPath = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_USER_CONFIG, + new String[] {"--userConf"}, + "specify user configuration file path(absolute)", + true, + ""); + protected StdOption killOpt = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_KILL_OPT, + new 
String[] {"--kill"}, + "specify linkis taskId for job to be killed", + true, + ""); + protected StdOption logOpt = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_LOG_OPT, + new String[] {"--log"}, + "specify linkis taskId for querying job status", + true, + ""); + protected StdOption resultOpt = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_RESULT_OPT, + new String[] {"--result"}, + "specify linkis taskId for querying job status", + true, + ""); + protected StdOption statusOpt = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_STATUS_OPT, + new String[] {"--status"}, + "specify linkis taskId for querying job status", + true, + ""); + protected StdOption asyncOpt = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_ASYNC_OPT, + new String[] {"--async"}, + "specify linkis taskId for querying job status", + true, + false); + protected StdOption modeOpt = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_MODE_OPT, + new String[] {"--mode"}, + "specify linkis execution mode: " + + CliConstants.UJES_MODE + + "/" + + CliConstants.ONCE_MODE + + ".", + true, + CliConstants.UJES_MODE); + protected Flag helpOpt = + flag( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_HELP_OPT, + new String[] {"--help"}, + "specify linkis taskId for querying job status", + true, + false); + + protected StdOption clusterOP = + option( + CliKeys.JOB_LABEL, + CliKeys.JOB_LABEL_CLUSTER, + new String[] {"-yarnCluster"}, + "specify linkis yarn cluster for this job", + true, + ""); + + protected StdOption engineTypeOP = + option( + CliKeys.JOB_LABEL, + CliKeys.JOB_LABEL_ENGINE_TYPE, + new String[] {"-engineType"}, + "specify linkis engineType for this job", + true, + ""); + + protected StdOption codeTypeOp = + option( + CliKeys.JOB_LABEL, + CliKeys.JOB_LABEL_CODE_TYPE, + new String[] {"-codeType"}, + "specify linkis runType for this job", + true, + ""); + protected StdOption codePathOp = + option( + 
CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.JOB_COMMON_CODE_PATH, + new String[] {"-codePath"}, + "specify file path that contains code you want to execute", + true, + ""); + + protected StdOption codeOp = + option( + CliKeys.JOB_EXEC, + CliKeys.JOB_EXEC_CODE, + new String[] {"-code"}, + "specify code that you want to execute", + true, + ""); + + protected StdOption scriptPathOp = + option( + CliKeys.JOB_SOURCE, + CliKeys.JOB_SOURCE_SCRIPT_PATH, + new String[] {"-scriptPath"}, + "specify remote path for your uploaded script", + true, + ""); + + protected StdOption submitUser = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.JOB_COMMON_SUBMIT_USER, + new String[] {"-submitUser"}, + "specify submit user for this job", + true, + ""); + + protected StdOption proxyUser = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.JOB_COMMON_PROXY_USER, + new String[] {"-proxyUser"}, + "specify proxy user who executes your code in Linkis server-side", + true, + ""); + + protected StdOption creatorOp = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.JOB_COMMON_CREATOR, + new String[] {"-creator"}, + "specify creator for this job", + true, + ""); + + protected StdOption outPathOp = + option( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH, + new String[] {"-outPath"}, + "specify output path for resultSet. If not specified, then output reset to screen(stdout)", + true, + ""); + + protected MapOption confMapOp = + mapOption( + CliKeys.JOB_PARAM_CONF, + CliKeys.JOB_PARAM_CONF, + new String[] {"-confMap"}, + "specify configurationMap(startupMap) for your job. You can put any start-up parameters into this Map(e.g. spark.executor.instances). Input format: -confMap key1=value1 -confMap key2=value2", + true); + + protected MapOption runtimeMapOp = + mapOption( + CliKeys.JOB_PARAM_RUNTIME, + CliKeys.JOB_PARAM_RUNTIME, + new String[] {"-runtimeMap"}, + "specify runtimeMap for your job. You can put any start-up parameters into this Map(e.g. 
spark.executor.instances). Input format: -runtimeMap key1=value1 -runtimeMap key2=value2", + true); + + protected SpecialMapOption varMapOp = + speciaMapOption( + CliKeys.JOB_PARAM_VAR, + CliKeys.JOB_PARAM_VAR, + new String[] {"-varMap"}, + "specify variables map. Variables is for key-word substitution. Use \'${key}\' to specify key-word. Input substitution rule as follow: -varMap key1=value1 -varMap key2=value2", + true); + + protected MapOption labelMapOp = + mapOption( + CliKeys.JOB_LABEL, + CliKeys.JOB_LABEL, + new String[] {"-labelMap"}, + "specify label map. You can put any Linkis into this Map. Input format: -labelMap labelName1=labelValue1 -labelMap labelName2=labelValue2", + true); + + protected MapOption sourceMapOp = + mapOption( + CliKeys.JOB_SOURCE, + CliKeys.JOB_SOURCE, + new String[] {"-sourceMap"}, + "specify source map. Input format: -sourceMap key1=value1 -sourceMap key2=value2", + true); + + protected MapOption jobContentMapOp = + mapOption( + CliKeys.JOB_CONTENT, + CliKeys.JOB_CONTENT, + new String[] {"-jobContentMap"}, + "specify jobContent map. 
Input format: -jobContentMap key1=value1 -jobContentMap key2=value2", + true); + + protected Flag versionFlag = + flag( + CliKeys.VERSION, + CliKeys.VERSION, + new String[] {"--version"}, + "show version", + true, + false); + + protected Parameter argumentsParas = + parameter( + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.JOB_EXTRA_ARGUMENTS, + "arguments", + "specify arguments if exist any", + true, + new String[] {""}); + + public UniversalCmdTemplate() { + super(CliCmdType.UNIVERSAL); + } + + @Override + public void checkParams() throws CommandException { + if (versionFlag.hasVal()) { + return; + } + int cnt = 0; + if (statusOpt.hasVal()) { + cnt++; + } + if (killOpt.hasVal()) { + cnt++; + } + if (logOpt.hasVal()) { + cnt++; + } + if (resultOpt.hasVal()) { + cnt++; + } + if (helpOpt.hasVal()) { + cnt++; + } + if (cnt > 1) { + throw new ValidateException( + "VLD0001", + ErrorLevel.ERROR, + CommonErrMsg.ValidationErr, + "Can only specify 1 of: " + + statusOpt.getParamName() + + "/" + + killOpt.getParamName() + + "/" + + helpOpt.getParamName() + + "/"); + } else if (cnt == 0) { + int cnt2 = 0; + if (argumentsParas.hasVal()) { + if (!(argumentsParas.getValue() instanceof String[]) + || argumentsParas.getValue().length == 0) { + throw new ValidateException( + "VLD0001", + ErrorLevel.ERROR, + CommonErrMsg.ValidationErr, + argumentsParas.getParamName() + + "has raw-value but failed to convert it into String-array. Raw-value: " + + argumentsParas.getRawVal()); + } + String firstPara = argumentsParas.getValue()[0]; + if (StringUtils.startsWith(firstPara, "-")) { + throw new CommandException( + "CMD0011", + ErrorLevel.ERROR, + CommonErrMsg.ValidationErr, + this.cmdType, + "Illegal argument: " + Arrays.toString(argumentsParas.getValue())); + } + File file = new File(firstPara); + if (!file.exists() || !file.isFile()) { + throw new ValidateException( + "VLD0001", + ErrorLevel.ERROR, + CommonErrMsg.ValidationErr, + "Argument: \'" + + firstPara + + "\' is not a linkis-cli option. 
Assume it's script file, but no file named \'" + + firstPara + + "\' is found"); + } + cnt2++; + } + if (codeOp.hasVal()) { + cnt2++; + } + if (codePathOp.hasVal()) { + cnt2++; + } + if (!modeOpt.hasVal() + || StringUtils.equalsIgnoreCase(modeOpt.getValue(), CliConstants.UJES_MODE)) { + if (cnt2 > 1) { + throw new ValidateException( + "VLD0001", + ErrorLevel.ERROR, + CommonErrMsg.ValidationErr, + "Can only specify at most one of linkis-cli option: \''" + + codeOp.getParamName() + + "\' or \'" + + codePathOp.getParamName() + + "\' or \'script-path and script-arguments\'"); + } + if (cnt2 == 0) { + throw new ValidateException( + "VLD0001", + ErrorLevel.ERROR, + CommonErrMsg.ValidationErr, + "Need to specify at least one of linkis-cli option: \'" + + codeOp.getParamName() + + "\' or \'" + + codePathOp.getParamName() + + "\' or \'script-path and script-arguments\'."); + } + } + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/AbstractStringConverter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/AbstractStringConverter.java similarity index 91% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/AbstractStringConverter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/AbstractStringConverter.java index 3d5afaa172c..68f3fae3967 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/AbstractStringConverter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/AbstractStringConverter.java @@ -15,6 +15,6 
@@ * limitations under the License. */ -package org.apache.linkis.cli.core.utils.converter; +package org.apache.linkis.cli.application.interactor.command.template.converter; public abstract class AbstractStringConverter implements Converter {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/Converter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/Converter.java similarity index 91% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/Converter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/Converter.java index 327f44cf2ed..757d73d1788 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/Converter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/Converter.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.utils.converter; +package org.apache.linkis.cli.application.interactor.command.template.converter; public interface Converter { TO convert(FROM from); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/PredefinedStringConverters.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/PredefinedStringConverters.java similarity index 91% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/PredefinedStringConverters.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/PredefinedStringConverters.java index 5883f68e1bd..4739ab0445d 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/PredefinedStringConverters.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/PredefinedStringConverters.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.utils.converter; +package org.apache.linkis.cli.application.interactor.command.template.converter; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.SpecialMap; +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.SpecialMap; import org.apache.commons.lang3.StringUtils; @@ -50,7 +50,7 @@ public String[] convert(String from) { if (StringUtils.isBlank(from)) { return null; } - String[] ret = from.trim().split(CommonConstants.ARRAY_SEQ); + String[] ret = from.trim().split(CliConstants.ARRAY_SEQ); for (int i = 0; i < ret.length; i++) { ret[i] = StringUtils.strip(ret[i], " \""); } @@ -66,7 +66,7 @@ public Map convert(String from) { return null; } Map paraMap = new HashMap<>(); - String[] arr = from.trim().split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1); + String[] arr = from.trim().split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1); // NOSONAR for (String prop : arr) { prop = prop.trim(); int index = prop.indexOf("="); @@ -97,7 +97,7 @@ public SpecialMap convert(String from) { return null; } SpecialMap paraMap = new SpecialMap<>(); - String[] arr = from.trim().split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1); + String[] arr = from.trim().split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1); // NOSONAR for (String prop : arr) { prop = prop.trim(); int index = prop.indexOf("="); diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/BaseOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/BaseOption.java similarity index 91% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/BaseOption.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/BaseOption.java index 8e9f288567a..eee29c8e94e 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/BaseOption.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/BaseOption.java @@ -15,10 +15,10 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.core.utils.converter.AbstractStringConverter; +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.interactor.command.template.converter.AbstractStringConverter; import org.apache.commons.lang3.StringUtils; @@ -93,7 +93,7 @@ public void setValueWithStr(String value) throws IllegalArgumentException { } public T getValue() { - return this.value; + return this.value == null ? 
this.defaultValue : this.value; } public void setValue(T value) { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Flag.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Flag.java similarity index 79% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Flag.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Flag.java index 8b153fb7ac1..47af30b0d09 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Flag.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Flag.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.core.utils.converter.PredefinedStringConverters; +import org.apache.linkis.cli.application.interactor.command.template.converter.PredefinedStringConverters; import org.apache.commons.lang3.StringUtils; @@ -43,14 +43,9 @@ public Flag( @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("\t") - .append(StringUtils.join(paramNames, "|")) - .append(" <") - .append(this.getDefaultValue().getClass().getSimpleName()) - .append(">") - .append(System.lineSeparator()); + sb.append("\t").append(StringUtils.join(paramNames, "|")).append(System.lineSeparator()); - sb.append("\t\t").append(this.getDefaultValue()).append(System.lineSeparator()); + sb.append("\t\t").append(this.getDescription()).append(System.lineSeparator()); sb.append("\t\tdefault by: ").append(this.getDefaultValue()).append(System.lineSeparator()); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/MapOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/MapOption.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/MapOption.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/MapOption.java index 095004c4d1a..d330d11c9b5 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/MapOption.java +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/MapOption.java @@ -15,11 +15,11 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import org.apache.commons.lang3.StringUtils; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Parameter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Parameter.java similarity index 92% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Parameter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Parameter.java index 09422106c8f..c75143e2723 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Parameter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Parameter.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.core.utils.converter.AbstractStringConverter; +import org.apache.linkis.cli.application.interactor.command.template.converter.AbstractStringConverter; import org.apache.commons.lang3.StringUtils; @@ -67,7 +67,7 @@ public String toString() { .append( defaultValue.getClass().isArray() ? StringUtils.join((Object[]) defaultValue, ", ") - : (defaultValue == null ? "" : defaultValue.toString())) + : defaultValue.toString()) .append(System.lineSeparator()); sb.append("\t\toptional:").append(isOptional()); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/SpecialMapOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/SpecialMapOption.java similarity index 90% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/SpecialMapOption.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/SpecialMapOption.java index d14758206b3..3fc6385db0b 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/SpecialMapOption.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/SpecialMapOption.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.core.interactor.command.SpecialMap; +import org.apache.linkis.cli.application.interactor.command.SpecialMap; /** kv-pairs in SpecialMapOption will be excluded by varAccess */ public class SpecialMapOption extends MapOption implements Cloneable { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/StdOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/StdOption.java similarity index 91% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/StdOption.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/StdOption.java index a9d29aef264..85468cd4609 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/StdOption.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/StdOption.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.core.utils.converter.AbstractStringConverter; +import org.apache.linkis.cli.application.interactor.command.template.converter.AbstractStringConverter; import org.apache.commons.lang3.StringUtils; @@ -54,7 +54,7 @@ public String toString() { .append( defaultValue.getClass().isArray() ? StringUtils.join((Object[]) defaultValue, ", ") - : (defaultValue == null ? "" : defaultValue.toString())) + : defaultValue.toString()) .append(System.lineSeparator()); sb.append("\t\toptional:").append(isOptional()); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/context/CliCtxImpl.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/context/CliCtxImpl.java new file mode 100644 index 00000000000..7d4f5d94b88 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/context/CliCtxImpl.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.context; + +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.var.VarAccess; + +import java.util.Map; + +public class CliCtxImpl implements CliCtx { + private CmdType cmdType; + private CmdTemplate cmdTemplate; + private VarAccess varAccess; + private Map extraMap; + + public CliCtxImpl( + CmdType cmdType, CmdTemplate cmdTemplate, VarAccess varAccess, Map extraMap) { + this.cmdType = cmdType; + this.cmdTemplate = cmdTemplate; + this.varAccess = varAccess; + this.extraMap = extraMap; + } + + @Override + public CmdType getCmdType() { + return cmdType; + } + + @Override + public CmdTemplate getTemplate() { + return cmdTemplate; + } + + @Override + public VarAccess getVarAccess() { + return varAccess; + } + + @Override + public Map getExtraMap() { + return extraMap; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/JobKiller.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/JobKiller.java new file mode 100644 index 00000000000..6693abe85f1 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/JobKiller.java @@ -0,0 +1,121 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.exception.ExceptionUtils; + +import java.text.MessageFormat; +import java.util.HashMap; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class JobKiller { + private static final Logger logger = LoggerFactory.getLogger(JobKiller.class); + + private LinkisJobOper oper; + + public JobKiller(LinkisJobOper linkisJobOperator) { + this.oper = linkisJobOperator; + } + + public KillResult doKill(String username, String jobId) { + + LinkisOperResultAdapter jobInfoResult; + try { + jobInfoResult = oper.queryJobInfo(username, jobId); + } catch (Exception e) { + return new KillResult( + true, "Failed to query jobInfo" + ExceptionUtils.getStackTrace(e), new HashMap<>()); + } + if (jobInfoResult.getUser() == null || jobInfoResult.getJobID() == null) { + return new KillResult(false, "user or jobID is null", new HashMap<>()); + } + if (jobInfoResult.getJobStatus() == null) { + return new KillResult(false, "jobStatus is null", new HashMap<>()); 
+ } + if (jobInfoResult.getJobStatus().isJobCancelled()) { + String msg = "Kill job aborted: Job has already been canceled."; + return new KillResult(false, msg, new HashMap<>()); + } else if (jobInfoResult.getJobStatus().isJobFinishedState()) { + String msg = "Kill job aborted: Job is already in finished-state(SUCCEED/FAILED)."; + return new KillResult(false, msg, new HashMap<>()); + // throw new LinkisClientExecutionException(JobStatus.FAILED, "EXE0004", + // ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } else { + try { + LinkisOperResultAdapter jobKillResult = + oper.kill( + jobInfoResult.getUser(), + jobInfoResult.getJobID(), + jobInfoResult.getStrongerExecId()); + } catch (Exception e) { + return new KillResult( + false, + "Exception thrown when trying to send kill request. Messgae: " + + ExceptionUtils.getStackTrace(e), + new HashMap<>()); + } + String msg = "Kill request has been sent"; + LoggerManager.getPlaintTextLogger().info(msg); + int retryCnt = 0; + final int MAX_RETRY = 5 * 6; + while (!jobInfoResult.getJobStatus().isJobFinishedState() + && !jobInfoResult.getJobStatus().isJobCancelled()) { + CliUtils.doSleepQuietly(CliConstants.JOB_QUERY_SLEEP_MILLS); + try { + jobInfoResult = oper.queryJobInfo(jobInfoResult.getUser(), jobInfoResult.getJobID()); + retryCnt = 0; // if exception then will not go here + } catch (Exception e) { + retryCnt++; + CliUtils.doSleepQuietly(5 * CliConstants.JOB_QUERY_SLEEP_MILLS); + if (retryCnt >= MAX_RETRY) { + return new KillResult( + false, + MessageFormat.format( + "After send kill. Client cannot get jobStatus from server continuously for {0} seconds. Client aborted. Assume kill failed! 
Error message: \n", + MAX_RETRY * 5 * CliConstants.JOB_QUERY_SLEEP_MILLS), + new HashMap<>()); + } + } + } + if (jobInfoResult.getJobStatus().isJobFinishedState() + && !jobInfoResult.getJobStatus().isJobCancelled()) { + msg = "Kill Failed: Job Current status: " + jobInfoResult.getJobStatus(); + return new KillResult(false, msg, new HashMap<>()); + // throw new LinkisClientExecutionException(JobStatus.FAILED, + // "EXE0004", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } else if (jobInfoResult.getJobStatus().isJobCancelled()) { + msg = + MessageFormat.format( + "Kill successful: jobId={0}, status={1}.", + jobInfoResult.getJobID(), jobInfoResult.getJobStatus()); + return new KillResult(true, msg, new HashMap<>()); + // LogUtils.getPlaintTextLogger().info(msg); + } else { + return new KillResult(false, "Unknown Error!!", new HashMap<>()); + } + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KeyParser.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KeyParser.java new file mode 100644 index 00000000000..01295ce6768 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KeyParser.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.constants.CliKeys; + +import org.apache.commons.lang3.StringUtils; + +import java.util.HashMap; +import java.util.Map; + +public class KeyParser { + + public static Map removePrefixForKeysInMap(Map map) { + final String[] PREFIX = + new String[] { + CliKeys.JOB_PARAM_CONF, + CliKeys.JOB_PARAM_RUNTIME, + CliKeys.JOB_PARAM_VAR, + CliKeys.JOB_EXEC, + CliKeys.JOB_SOURCE, + CliKeys.JOB_LABEL, + CliKeys.JOB_CONTENT + }; + for (String prefix : PREFIX) { + map = removePrefixForKeysInMap(map, prefix); + } + return map; + } + + public static void removePrefixAndPutValToMap( + Map map, String key, Object value, String prefix) { + String realKey = getRealKey(key, prefix); + if (StringUtils.isNotBlank(realKey) && !(value instanceof Map)) { + map.put(realKey, value); + } + } + + private static Map removePrefixForKeysInMap( + Map map, String prefix) { + if (map == null) { + return null; + } + Map newMap = new HashMap<>(); + for (String key : map.keySet()) { + String realKey = getRealKey(key, prefix); + if (StringUtils.isNotBlank(realKey)) { + if (StringUtils.startsWith(key, prefix)) { + newMap.put(realKey, map.get(key)); + } else { + newMap.put(key, map.get(key)); + } + } + } + return newMap; + } + + private static String getRealKey(String key, String prefix) { + String realKey = key; + if (StringUtils.startsWith(key, prefix)) { + realKey = StringUtils.substring(key, prefix.length() + 1); + } + return realKey; + } +} diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KillResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KillResult.java new file mode 100644 index 00000000000..732cd00ec7b --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KillResult.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.entity.job.JobResult; + +import java.util.Map; + +public class KillResult implements JobResult { + private Boolean success; + private String message; + private Map extraMessage; + + public KillResult(Boolean success, String message, Map extraMessage) { + this.success = success; + this.message = message; + this.extraMessage = extraMessage; + } + + @Override + public Boolean isSuccess() { + return success; + } + + @Override + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + @Override + public Map getExtraMessage() { + return extraMessage; + } + + public void setExtraMessage(Map extraMessage) { + this.extraMessage = extraMessage; + } + + public void setSuccess(Boolean success) { + this.success = success; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatus.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LinkisJobStatus.java similarity index 97% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatus.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LinkisJobStatus.java index 9b1435da2ae..dd4df9ee9b6 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatus.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LinkisJobStatus.java @@ -15,10 +15,10 @@ * limitations under the 
License. */ -package org.apache.linkis.cli.application.interactor.job; +package org.apache.linkis.cli.application.interactor.job.common; +import org.apache.linkis.cli.application.entity.job.JobStatus; import org.apache.linkis.cli.application.operator.once.LinkisNodeStatus; -import org.apache.linkis.cli.common.entity.job.JobStatus; import org.apache.commons.lang3.StringUtils; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogData.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogData.java new file mode 100644 index 00000000000..5397bd04f17 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogData.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; + +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.LinkedBlockingDeque; + +public class LogData { + private String user; + private String jobId; + private String execId; + // log will be fetched and stored in LinkedBlockingDeque, while logRetriever consumes log in + // another thread + private LinkedBlockingDeque logBuffer = new LinkedBlockingDeque(); + private String logPath; // remote path for job log + private Integer nextLogLineIdx; // index of next log line to be fetched + private Boolean hasNextLogLine; // if there is still log to be retrieve + private volatile Boolean logFin = false; // if all log is finished + + public LogData(String user, String jobId, String execId) { + this.user = user; + this.jobId = jobId; + this.execId = execId; + } + + public String getUser() { + return user; + } + + public String getJobID() { + return jobId; + } + + public String getExecID() { + return execId; + } + + public final String getLogPath() { + return logPath; + } + + public final void setLogPath(String logPath) { + this.logPath = logPath; + } + + public String consumeLog() { + List logs = new LinkedList<>(); + this.logBuffer.drainTo(logs, this.logBuffer.size()); + StringBuilder tmp = new StringBuilder(); + for (String str : logs) { + tmp.append(str); + } + return tmp.toString(); + } + + public void appendLog(String log) { + this.logBuffer.add(log); + } + + public Integer getNextLogLineIdx() { + return nextLogLineIdx; + } + + public void setNextLogLineIdx(Integer nextLogLineIdx) { + this.nextLogLineIdx = nextLogLineIdx; + } + + public Boolean hasNextLogLine() { + return hasNextLogLine; + } + + public void setHasNextLogLine(Boolean hasNextLogLine) { + this.hasNextLogLine = hasNextLogLine; + } + + public void updateLog(LinkisOperResultAdapter adapter) { + if (adapter.getLogPath() != null) 
{ + setLogPath(adapter.getLogPath()); + } + if (adapter.getLog() != null + && adapter.getNextLogLine() != null + && adapter.hasNextLogLine() != null) { + setNextLogLineIdx(adapter.getNextLogLine()); + setHasNextLogLine(adapter.hasNextLogLine()); + appendLog(adapter.getLog()); + } + } + + public void setLogFin() { + this.logFin = true; + } + + public Boolean isLogFin() { + return logFin; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogRetriever.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogRetriever.java new file mode 100644 index 00000000000..a6a5a38d6c7 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogRetriever.java @@ -0,0 +1,190 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; +import org.apache.linkis.cli.application.observer.event.LogStartEvent; +import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; +import org.apache.linkis.cli.application.utils.SchedulerManager; + +import java.text.MessageFormat; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Log retrieval logic: 1. LogRetriever polls to obtain real-time logs, and if the task is + * completed, it retrieves persistent logs 2. Organized by org.apache.inkis.cli.application. + * interactor.job. com LogRetriever # sendLogFin decides whether to continue polling logs 3. + * getNextLogLine is the FromLine returned by the log interface 4. 
The return of persistent logs is + * OpenLogResult2 + */ +public class LogRetriever { + private static final Logger logger = LoggerFactory.getLogger(LogRetriever.class); + + private LinkisJobOper linkisJobOperator; + private LogData logData; + + private Boolean incLogMode; + + private LinkisClientListener logListener; + private LinkisClientEvent logStartEvent = new LogStartEvent(); + + public LogRetriever( + String user, + String jobId, + String execId, + Boolean incLogMode, + LinkisJobOper linkisJobOperator, + LinkisClientListener logListener) { + this.linkisJobOperator = linkisJobOperator; + this.logListener = logListener; + this.incLogMode = incLogMode; + this.logData = new LogData(user, jobId, execId); + registerLogListener(logListener); + } + + public void retrieveLogAsync() { + if (logData.getUser() == null || logData.getJobID() == null) { + throw new LinkisClientExecutionException( + "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); + } + try { + Thread logConsumer = new Thread(() -> notifyLogListener(), "Log-Consumer"); + Thread logRetriever = new Thread(() -> queryLogLoop(logData), "Log-Retriever"); + SchedulerManager.getCachedThreadPoolExecutor().execute(logRetriever); + SchedulerManager.getCachedThreadPoolExecutor().execute(logConsumer); + } catch (Exception e) { + logger.warn("Failed to retrieve log", e); + } + } + + public void waitIncLogComplete() { + int retry = 0; + int MAX_RETRY = 300; // wait for 10 minutes after job finish + while (retry++ < MAX_RETRY) { + if (this.logFinReceived()) { + return; + } + CliUtils.doSleepQuietly(CliConstants.JOB_QUERY_SLEEP_MILLS); + } + String msg = + "Job is in Finished state(SUCCEED/FAILED/CANCELED) but client keep querying inclog for " + + (MAX_RETRY * CliConstants.JOB_QUERY_SLEEP_MILLS / 1000) + + "seconds. Execution ends forcefully. 
Next will try handle execution result."; + logger.warn(msg); + LoggerManager.getInformationLogger().warn(msg); + } + + public void queryLogLoop(LogData data) { + int curLogIdx; + int nextLogIdx; + boolean hasNext = true; + int retryCnt = 0; + // continues fails for 90s, then exit thread + final int MAX_RETRY = 12; + try { + while (hasNext) { + curLogIdx = data.getNextLogLineIdx() == null ? 0 : data.getNextLogLineIdx(); + try { + queryJobLogFromLine(data, curLogIdx); + } catch (Exception e) { + logger.error("Cannot get inc-log:", e); + // and yes sometimes server may not be able to prepare persisted-log + retryCnt++; + if (retryCnt >= MAX_RETRY) { + logger.error( + "Continuously failing to query inc-log for " + + MAX_RETRY * (MAX_RETRY + 2) * 500 / 1000 + + "s. Will no longer try to query log", + e); + break; + } + CliUtils.doSleepQuietly(500L + 500L * retryCnt); + continue; + } + retryCnt = 0; + nextLogIdx = data.getNextLogLineIdx() == null ? curLogIdx : data.getNextLogLineIdx(); + if (incLogMode) { + hasNext = data.hasNextLogLine() == null ? curLogIdx < nextLogIdx : data.hasNextLogLine(); + } else { + hasNext = curLogIdx < nextLogIdx; + } + if (curLogIdx >= nextLogIdx) { + String msg = + MessageFormat.format( + "Retrieving log, curLogIdx={}, hasNext={0}, nextLogIdx={1}", + curLogIdx, hasNext, nextLogIdx); + logger.info(msg); + } + CliUtils.doSleepQuietly(CliConstants.JOB_QUERY_SLEEP_MILLS); + } + } catch (Exception e) { + logger.error("Something goes wrong. 
Job Log may be incomplete", e); + } finally { + sendLogFin(); + } + } + + private void queryJobLogFromLine(LogData data, int fromLine) throws LinkisClientRuntimeException { + + LinkisOperResultAdapter jobInfoResult = + linkisJobOperator.queryJobInfo(data.getUser(), data.getJobID()); + data.updateLog(jobInfoResult); + if (!jobInfoResult.getJobStatus().isJobFinishedState()) { + data.updateLog( + linkisJobOperator.queryRunTimeLogFromLine( + data.getUser(), data.getJobID(), data.getExecID(), fromLine)); + } else { + data.updateLog( + linkisJobOperator.queryPersistedLogFromLine( + data.getLogPath(), data.getUser(), data.getJobID(), fromLine)); + } + } + + public Boolean isIncLogMode() { + return incLogMode; + } + + public void registerLogListener(LinkisClientListener observer) { + this.logStartEvent.register(observer); + } + + public void notifyLogListener() { + if (this.logStartEvent.isRegistered()) { + logStartEvent.notifyObserver(logStartEvent, this.logData); + } + } + + public void sendLogFin() { + this.logData.setLogFin(); + } + + public boolean logFinReceived() { + return this.logData.isLogFin(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultData.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultData.java new file mode 100644 index 00000000000..8c9bec028be --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultData.java @@ -0,0 +1,165 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; + +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.LinkedBlockingDeque; + +public class ResultData { + + private final String user; + private final String jobId; + private final String execId; + private final LinkedBlockingDeque resultContent = new LinkedBlockingDeque<>(); + private String extraMessage; + private JobStatus jobStatus = null; + private String resultLocation; + private String[] resultSetPaths = null; // remote paths for job result set + private Boolean hasNextResultPage; + private Integer errCode = null; + private String errDesc = null; + private boolean hasResult = true; + + private volatile Boolean resultFin = false; + + public ResultData(String user, String jobId, String execId) { + this.user = user; + this.jobId = jobId; + this.execId = execId; + } + + public String getJobID() { + return jobId; + } + + public String getUser() { + return user; + } + + public final String getExecID() { + return execId; + } + + public final String getResultLocation() { + return resultLocation; + } + + public final void setResultLocation(String resultLocation) { + this.resultLocation = resultLocation; + } + + public String[] 
getResultSetPaths() { + return resultSetPaths; + } + + public final void setResultSetPaths(String[] resultSetPaths) { + this.resultSetPaths = resultSetPaths; + } + + public Integer getErrCode() { + return errCode; + } + + public void setErrCode(Integer errCode) { + this.errCode = errCode; + } + + public String getErrDesc() { + return errDesc; + } + + public void setErrDesc(String errDesc) { + this.errDesc = errDesc; + } + + public List consumeResultContent() { + List ret = new LinkedList<>(); + resultContent.drainTo(ret, resultContent.size()); + return ret; + } + + public void appendResultContent(ResultSet resultContent) { + this.resultContent.add(resultContent); + } + + public Boolean hasNextResultPage() { + return hasNextResultPage; + } + + public void setHasNextResultPage(Boolean hasNextResultPage) { + this.hasNextResultPage = hasNextResultPage; + } + + public void setResultFin() { + this.resultFin = true; + } + + public boolean isResultFin() { + return this.resultFin; + } + + public boolean hasResult() { + return hasResult; + } + + public void setHasResult(boolean hasResult) { + this.hasResult = hasResult; + } + + public JobStatus getJobStatus() { + return jobStatus; + } + + public void setJobStatus(JobStatus jobStatus) { + this.jobStatus = jobStatus; + } + + public String getExtraMessage() { + return extraMessage; + } + + public void setExtraMessage(String extraMessage) { + this.extraMessage = extraMessage; + } + + public void updateByOperResult(LinkisOperResultAdapter adapter) { + if (adapter.getResultLocation() != null) { + setResultLocation(adapter.getResultLocation()); + } + if (adapter.getResultSetPaths() != null) { + setResultSetPaths(adapter.getResultSetPaths()); + } + if (adapter.getErrCode() != null) { + setErrCode(adapter.getErrCode()); + } + if (adapter.getErrDesc() != null) { + setErrDesc(adapter.getErrDesc()); + } + if (adapter.getResultContent() != null && adapter.resultHasNextPage() != null) { + setHasNextResultPage(adapter.resultHasNextPage()); 
+ appendResultContent(adapter.getResultContent()); + } + if (adapter.getJobStatus() != null) { + setJobStatus(adapter.getJobStatus()); + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultRetriever.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultRetriever.java new file mode 100644 index 00000000000..9997dd1f412 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultRetriever.java @@ -0,0 +1,186 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.observer.event.FetchResultEvent; +import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; +import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; +import org.apache.linkis.cli.application.utils.SchedulerManager; + +import org.apache.commons.lang3.StringUtils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ResultRetriever { + private static final Logger logger = LoggerFactory.getLogger(ResultRetriever.class); + + private LinkisJobOper linkisJobOperator; + private ResultData resultData; + + private LinkisClientListener resultListener; + private LinkisClientEvent fetchResultEvent = new FetchResultEvent(); + + public ResultRetriever( + String user, + String jobId, + String execId, + LinkisJobOper linkisJobOperator, + LinkisClientListener resultListener) { + this.linkisJobOperator = linkisJobOperator; + this.resultListener = resultListener; + this.resultData = new ResultData(user, jobId, execId); + registerResultListener(resultListener); + } + + public void retrieveResultSync() { + if (resultData.getUser() == null || resultData.getJobID() == null) { + throw new LinkisClientExecutionException( + "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); + } + resultData.updateByOperResult( + 
linkisJobOperator.queryJobInfo(resultData.getUser(), resultData.getJobID())); + if (resultData.getJobStatus() == null) { + throw new LinkisClientExecutionException( + "EXE0038", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "jobStatus is null"); + } + if (!resultData.getJobStatus().isJobSuccess()) { + LoggerManager.getInformationLogger() + .info( + "Job status is not success but \'" + + resultData.getJobStatus() + + "\'. Will not try to retrieve any Result"); + resultData.setResultFin(); // inform listener to stop + return; + } + if (StringUtils.isBlank(resultData.getResultLocation())) { + throw new LinkisClientExecutionException( + "EXE0037", + ErrorLevel.WARN, + CommonErrMsg.ExecutionErr, + "Got blank ResultLocation from server. Job may not have result-set. Will not try to retrieve any Result"); + } + resultData.updateByOperResult( + linkisJobOperator.queryResultSetPaths( + resultData.getUser(), resultData.getJobID(), resultData.getResultLocation())); + + if (resultData.getResultSetPaths() == null || resultData.getResultSetPaths().length == 0) { + String msg = "Your job got no result."; + logger.warn(msg); + resultData.setResultFin(); // inform listener to stop + resultData.setHasResult(false); + return; + } + + try { + resultData.setHasResult(true); + // Thread resultConsumer = new Thread(() -> notifyResultListener()); + Thread resultThread = new Thread(() -> queryResultLoop(resultData), "Result-Retrieve-Thread"); + // SchedulerUtils.getCachedThreadPoolExecutor().execute(resultConsumer); + SchedulerManager.getCachedThreadPoolExecutor().execute(resultThread); + notifyResultListener(); + } catch (Exception e) { + logger.error("Failed to retrieve result", e); + throw e; + } + } + + public void queryResultLoop(ResultData data) { + boolean hasNext = true; + int retryCnt = 0; + final int MAX_RETRY = 30; // continues fails for 250s, then exit + int idx = 0; + try { + while (hasNext) { + try { + hasNext = queryOneResult(data, idx); + } catch (LinkisClientRuntimeException 
e) { + logger.error("Cannot get result:", e); + retryCnt++; + if (retryCnt >= MAX_RETRY) { + logger.error( + "Continuously failing to query result for " + + MAX_RETRY * (MAX_RETRY + 2) * 500 / 1000 + + "s. Will no longer try to query result", + e); + return; + } else { + hasNext = true; + } + CliUtils.doSleepQuietly(500l + 500l * retryCnt); // maybe server problem. sleep longer + continue; + } + idx++; + } + } catch (Exception e) { + logger.error("Something goes wrong. Job Result may be incomplete", e); + throw e; + } finally { + data.setResultFin(); + } + } + + private boolean queryOneResult(ResultData data, int idxResultSet) { + Integer curPage = 1; + boolean hasNextResult = true; + boolean hasNextPage = true; + while (hasNextPage) { + data.updateByOperResult( + linkisJobOperator.queryResultSetGivenResultSetPath( + data.getResultSetPaths(), + idxResultSet, + data.getUser(), + curPage, + CliConstants.RESULTSET_PAGE_SIZE)); + if (data.hasNextResultPage() == null) { + throw new LinkisClientExecutionException( + "EXE0040", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionResultErr, + "Something foes wrong. 
Got null as \'hasNextPage\'."); + } + hasNextPage = data.hasNextResultPage(); + + curPage++; + hasNextResult = idxResultSet + 1 < data.getResultSetPaths().length; + } + return hasNextResult; + } + + public void registerResultListener(LinkisClientListener observer) { + this.fetchResultEvent.register(observer); + } + + public void notifyResultListener() { + if (this.fetchResultEvent.isRegistered()) { + fetchResultEvent.notifyObserver(fetchResultEvent, this.resultData); + } + } + + public void setResultFin() { + this.resultData.setResultFin(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultSet.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultSet.java new file mode 100644 index 00000000000..5821e14a554 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultSet.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; + +public class ResultSet implements Cloneable { + private int resultsetIdx; + private List> resultMeta; + private List> content; + + public ResultSet() {} + + public int getResultsetIdx() { + return resultsetIdx; + } + + public void setResultsetIdx(int resultsetIdx) { + this.resultsetIdx = resultsetIdx; + } + + public List> getResultMeta() { + return resultMeta; + } + + public void setResultMeta(List> resultMeta) { + this.resultMeta = resultMeta; + } + + public List> getContent() { + return content; + } + + public void setContent(List> content) { + this.content = content; + } + + @Override + protected ResultSet clone() throws CloneNotSupportedException { + ResultSet ret = new ResultSet(); + if (this.resultMeta != null) { + List> resultMeta = null; + ret.resultMeta = new LinkedList<>(); + for (LinkedHashMap r1 : resultMeta) { + ret.resultMeta.add((LinkedHashMap) r1.clone()); + } + } + if (this.content.size() != 0) { + ret.content = new LinkedList<>(); + for (List r1 : content) { + ret.content.add(new LinkedList<>(r1)); + } + } + return ret; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/help/HelpJob.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/help/HelpJob.java new file mode 100644 index 00000000000..943c54ed633 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/help/HelpJob.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.help; + +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.present.HelpPresenter; +import org.apache.linkis.cli.application.present.model.HelpInfoModel; + +import java.util.HashMap; +import java.util.Map; + +public class HelpJob implements Job { + private CliCtx ctx; + + @Override + public void build(CliCtx ctx) { + this.ctx = ctx; + } + + @Override + public JobResult run() { + HelpInfoModel model = new HelpInfoModel(); + model.buildModel(ctx.getTemplate()); + new HelpPresenter().present(model); + return new JobResult() { + @Override + public Boolean isSuccess() { + return true; + } + + @Override + public String getMessage() { + return ""; + } + + @Override + public Map getExtraMessage() { + return new HashMap<>(); + } + }; + } + + @Override + public void onDestroy() {} +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJob.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJob.java new file mode 100644 index 00000000000..9affc775b23 --- /dev/null +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJob.java @@ -0,0 +1,268 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.interactive; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.JobKiller; +import org.apache.linkis.cli.application.interactor.job.common.LogRetriever; +import org.apache.linkis.cli.application.interactor.job.common.ResultRetriever; +import org.apache.linkis.cli.application.operator.OperManager; +import 
org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; +import org.apache.linkis.cli.application.present.LogPresenter; +import org.apache.linkis.cli.application.present.ResultPresenter; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; + +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class InteractiveJob implements Job { + + private static final Logger logger = LoggerFactory.getLogger(InteractiveJob.class); + + private CliCtx ctx; + + private Boolean isAsync = false; + + private LinkisJobOper oper; + + private InteractiveJobDesc desc; + + private String username; + + private String jobId; + + @Override + public void build(CliCtx ctx) { + this.ctx = ctx; + this.isAsync = + ctx.getVarAccess().getVarOrDefault(Boolean.class, CliKeys.LINKIS_CLIENT_ASYNC_OPT, false); + this.desc = InteractiveJobDescBuilder.build(ctx); + this.oper = (LinkisJobOper) OperManager.getNew(CliKeys.Linkis_OPER, ctx); + } + + @Override + public JobResult run() { + + // Indicator + StringBuilder infoBuilder = new StringBuilder(); + infoBuilder.append("connecting to linkis gateway:").append(oper.getServerUrl()); + LoggerManager.getInformationLogger().info(infoBuilder.toString()); + infoBuilder.setLength(0); + + // Submit + LinkisOperResultAdapter submitResult = oper.submit(desc); + CliUtils.doSleepQuietly(CliConstants.JOB_QUERY_SLEEP_MILLS); + + // JobInfo + LinkisOperResultAdapter jobInfoResult = + oper.queryJobInfo(submitResult.getUser(), submitResult.getJobID()); + oper.queryJobStatus( + submitResult.getUser(), submitResult.getJobID(), submitResult.getStrongerExecId()); + infoBuilder.setLength(0); + infoBuilder + .append("JobId:") + 
.append(submitResult.getJobID()) + .append(System.lineSeparator()) + .append("TaskId:") + .append(submitResult.getJobID()) + .append(System.lineSeparator()) + .append("ExecId:") + .append(submitResult.getStrongerExecId()); + LoggerManager.getPlaintTextLogger().info(infoBuilder.toString()); + infoBuilder.setLength(0); + + // Submit success or not + if (!jobInfoResult.getJobStatus().isJobSubmitted()) { + return new InteractiveJobResult(false, "Failed to submit job", new HashMap<>()); + } else { + // Output that job is submitted + infoBuilder.append("Job is successfully submitted!").append(System.lineSeparator()); + LoggerManager.getInformationLogger().info(infoBuilder.toString()); + infoBuilder.setLength(0); + username = submitResult.getUser(); + jobId = submitResult.getJobID(); + } + + // async job, return + if (isAsync) { + return new InteractiveJobResult( + jobInfoResult.getJobStatus().isJobSubmitted(), + "Async Submission Success", + new HashMap<>()); + } + + CliUtils.doSleepQuietly(2000l); + + // get log while running + LogRetriever logRetriever = + new LogRetriever( + jobInfoResult.getUser(), + jobInfoResult.getJobID(), + submitResult.getStrongerExecId(), + true, + oper, + new LogPresenter()); + // async because we need to query job status + logRetriever.retrieveLogAsync(); + + // wait complete + jobInfoResult = + waitJobComplete( + submitResult.getUser(), submitResult.getJobID(), submitResult.getStrongerExecId()); + logRetriever.waitIncLogComplete(); + + // get result-set + String outputPath = + ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH); + ResultPresenter presenter; + if (StringUtils.isBlank(outputPath)) { + presenter = new ResultPresenter(); + } else { + presenter = new ResultPresenter(true, outputPath); + } + + ResultRetriever resultRetriever = + new ResultRetriever( + jobInfoResult.getUser(), + jobInfoResult.getJobID(), + submitResult.getStrongerExecId(), + oper, + presenter); + + JobResult result = 
getResult(jobInfoResult, resultRetriever); + + return result; + } + + private JobResult getResult( + LinkisOperResultAdapter jobInfoResult, ResultRetriever resultRetriever) + throws LinkisClientRuntimeException { + if (!jobInfoResult.getJobStatus().isJobSuccess()) { + LoggerManager.getInformationLogger() + .info( + "Job status is not success but \'" + + jobInfoResult.getJobStatus() + + "\'. Will not try to retrieve any Result"); + Map extraMap = new HashMap<>(); + if (jobInfoResult.getErrCode() != null) { + extraMap.put("errorCode", String.valueOf(jobInfoResult.getErrCode())); + } + if (StringUtils.isNotBlank(jobInfoResult.getErrDesc())) { + extraMap.put("errorDesc", jobInfoResult.getErrDesc()); + } + return new InteractiveJobResult(false, "Execute Error!!!", extraMap); + } + InteractiveJobResult result = + new InteractiveJobResult(true, "Execute Success!!!", new HashMap<>()); + try { + resultRetriever.retrieveResultSync(); + result.setSuccess(true); + result.setMessage("execute success!!!"); + } catch (LinkisClientExecutionException e) { + if (e.getCode().equals("EXE0037")) { + result.setSuccess(true); + result.setMessage("execute success!!!"); + LoggerManager.getInformationLogger().warn(e.getMessage()); + } else { + result.setSuccess(false); + result.setMessage("execute failed!!!\n" + ExceptionUtils.getStackTrace(e)); + } + resultRetriever.setResultFin(); // inform listener to stop + } catch (Exception e) { + result.setSuccess(false); + result.setMessage("execute failed!!!\n" + ExceptionUtils.getStackTrace(e)); + resultRetriever.setResultFin(); // inform listener to stop + } + return result; + } + + private LinkisOperResultAdapter waitJobComplete(String user, String jobId, String execId) + throws LinkisClientRuntimeException { + int retryCnt = 0; + final int MAX_RETRY = 30; + + LinkisOperResultAdapter jobInfoResult = oper.queryJobInfo(user, jobId); + oper.queryJobStatus(user, jobId, execId); + + while (!jobInfoResult.getJobStatus().isJobFinishedState()) { + // 
query progress + try { + jobInfoResult = oper.queryJobInfo(user, jobId); + oper.queryJobStatus(jobInfoResult.getUser(), jobInfoResult.getJobID(), execId); + } catch (Exception e) { + logger.warn("", e); + retryCnt++; + if (retryCnt >= MAX_RETRY) { + throw new LinkisClientExecutionException( + "EXE0013", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "Cannot get jobStatus from server continuously for {0} seconds. Client aborted! Error message: \n", + MAX_RETRY * 5 * CliConstants.JOB_QUERY_SLEEP_MILLS, + e); + } + CliUtils.doSleepQuietly( + 5 * CliConstants.JOB_QUERY_SLEEP_MILLS); // maybe server problem. sleep + // longer + continue; + } + retryCnt = 0; // reset counter + if (jobInfoResult.getJobStatus().isJobAbnormalStatus()) { + throw new LinkisClientExecutionException( + "EXE0006", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "Job is in abnormal status: " + CliUtils.GSON.toJson(jobInfoResult)); + } + CliUtils.doSleepQuietly(CliConstants.JOB_QUERY_SLEEP_MILLS); + } + return jobInfoResult; + } + + @Override + public void onDestroy() { + if (StringUtils.isBlank(username) || StringUtils.isBlank(jobId)) { + logger.warn("Failed to kill job username or jobId is blank"); + return; + } + if (isAsync) { + return; + } + try { + new JobKiller(oper).doKill(username, jobId); + } catch (Exception e) { + logger.error("Failed to kill job", e); + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDesc.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDesc.java new file mode 100644 index 00000000000..629c4668413 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDesc.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.interactive; + +import java.util.Map; + +public class InteractiveJobDesc { + private String submitUser; + private String proxyUser; + private String creator; + private Map executionMap; + private Map paramConfMap; + private Map paramRunTimeMap; + private Map paramVarsMap; + private Map labelMap; + private Map sourceMap; + + // 需要加到header中的一些参数 + private Map headers; + + public String getSubmitUser() { + return submitUser; + } + + public void setSubmitUser(String submitUser) { + this.submitUser = submitUser; + } + + public String getProxyUser() { + return proxyUser; + } + + public void setProxyUser(String proxyUser) { + this.proxyUser = proxyUser; + } + + public String getCreator() { + return creator; + } + + public void setCreator(String creator) { + this.creator = creator; + } + + public Map getParamConfMap() { + return paramConfMap; + } + + public void setParamConfMap(Map paramConfMap) { + this.paramConfMap = paramConfMap; + } + + public Map getParamRunTimeMap() { + return paramRunTimeMap; + } + + public void setParamRunTimeMap(Map paramRunTimeMap) { + this.paramRunTimeMap = paramRunTimeMap; + } + + public Map getExecutionMap() { + return executionMap; + } + + 
public void setExecutionMap(Map executionMap) { + this.executionMap = executionMap; + } + + public Map getParamVarsMap() { + return paramVarsMap; + } + + public void setParamVarsMap(Map paramVarsMap) { + this.paramVarsMap = paramVarsMap; + } + + public Map getSourceMap() { + return sourceMap; + } + + public void setSourceMap(Map sourceMap) { + this.sourceMap = sourceMap; + } + + public Map getLabelMap() { + return labelMap; + } + + public void setLabelMap(Map labelMap) { + this.labelMap = labelMap; + } + + public Map getHeaders() { + return headers; + } + + public void setHeaders(Map headers) { + this.headers = headers; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java new file mode 100644 index 00000000000..2b0b20188a3 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java @@ -0,0 +1,177 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.interactive; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.constants.LinkisKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.interactor.job.common.KeyParser; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.operator.ujes.UJESClientFactory; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.StringUtils; + +import java.util.*; + +public class InteractiveJobDescBuilder { + + public static InteractiveJobDesc build(CliCtx ctx) { + InteractiveJobDesc desc = new InteractiveJobDesc(); + + VarAccess stdVarAccess = ctx.getVarAccess(); + + Map confMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_PARAM_CONF); + Map runtimeMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_PARAM_RUNTIME); + Map varMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_PARAM_VAR); + Map labelMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_LABEL); + Map sourceMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_SOURCE); + Map executionMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_EXEC); + + confMap = confMap == null ? new HashMap<>() : confMap; + runtimeMap = runtimeMap == null ? new HashMap<>() : runtimeMap; + varMap = varMap == null ? new HashMap<>() : varMap; + labelMap = labelMap == null ? new HashMap<>() : labelMap; + sourceMap = sourceMap == null ? new HashMap<>() : sourceMap; + executionMap = executionMap == null ? new HashMap<>() : executionMap; + + /** remove key prefix of all keys in map type params. e.g. 
kv in confMap, labelMap etc. */ + confMap = KeyParser.removePrefixForKeysInMap(confMap); + runtimeMap = KeyParser.removePrefixForKeysInMap(runtimeMap); + labelMap = KeyParser.removePrefixForKeysInMap(labelMap); + sourceMap = KeyParser.removePrefixForKeysInMap(sourceMap); + executionMap = KeyParser.removePrefixForKeysInMap(executionMap); + + /** remove key prefix of non-map type params */ + for (String key : stdVarAccess.getAllVarKeys()) { + Object val = stdVarAccess.getVar(Object.class, key); + if (!(val instanceof Map) && val != null) { + // note that we allow it to overwrite existing values in map + if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_CONF)) { + KeyParser.removePrefixAndPutValToMap(confMap, key, val, CliKeys.JOB_PARAM_CONF); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_VAR)) { + KeyParser.removePrefixAndPutValToMap(varMap, key, val, CliKeys.JOB_PARAM_VAR); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_RUNTIME)) { + KeyParser.removePrefixAndPutValToMap(runtimeMap, key, val, CliKeys.JOB_PARAM_RUNTIME); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_EXEC)) { + KeyParser.removePrefixAndPutValToMap(executionMap, key, val, CliKeys.JOB_EXEC); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_LABEL)) { + KeyParser.removePrefixAndPutValToMap(labelMap, key, val, CliKeys.JOB_LABEL); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_SOURCE)) { + KeyParser.removePrefixAndPutValToMap(sourceMap, key, val, CliKeys.JOB_SOURCE); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.LINKIS_CLIENT_COMMON)) { + // do nothing + } else { + // confMap.put(key, stdVarAccess.getVar(Object.class, key)); + } + } + } + + Boolean asyncSubmission = + stdVarAccess.getVarOrDefault(Boolean.class, CliKeys.LINKIS_CLIENT_ASYNC_OPT, false); + + String creator; + if (!asyncSubmission) { + creator = + stdVarAccess.getVarOrDefault( + String.class, CliKeys.JOB_COMMON_CREATOR, 
CliConstants.JOB_CREATOR_DEFAULT); + } else { + creator = + stdVarAccess.getVarOrDefault( + String.class, CliKeys.JOB_COMMON_CREATOR, CliConstants.JOB_CREATOR_ASYNC_DEFAULT); + } + String code = stdVarAccess.getVar(String.class, CliKeys.JOB_EXEC_CODE); + String engineType = stdVarAccess.getVar(String.class, CliKeys.JOB_LABEL_ENGINE_TYPE); + String runType = stdVarAccess.getVar(String.class, CliKeys.JOB_LABEL_CODE_TYPE); + String scriptPath = + stdVarAccess.getVarOrDefault(String.class, CliKeys.JOB_SOURCE_SCRIPT_PATH, "LinkisCli"); + + String osUser = System.getProperty(CliKeys.LINUX_USER_KEY); + String[] adminUsers = StringUtils.split(CliKeys.ADMIN_USERS, ','); + Set adminSet = new HashSet<>(); + for (String admin : adminUsers) { + adminSet.add(admin); + } + String submitUsr = CliUtils.getSubmitUser(stdVarAccess, osUser, adminSet); + String proxyUsr = CliUtils.getProxyUser(stdVarAccess, submitUsr, adminSet); + + String enableExecuteOnce = + stdVarAccess.getVarOrDefault(String.class, CliKeys.JOB_LABEL_EXECUTEONCE, "true"); + // default executeOnce-mode + if (Boolean.parseBoolean(enableExecuteOnce)) { + labelMap.put(LinkisKeys.KEY_EXECUTEONCE, ""); + } else { + labelMap.remove(LinkisKeys.KEY_EXECUTEONCE); + } + String codePath = stdVarAccess.getVar(String.class, CliKeys.JOB_COMMON_CODE_PATH); + Object extraArgsObj = stdVarAccess.getVar(Object.class, CliKeys.JOB_EXTRA_ARGUMENTS); + if (extraArgsObj != null + && extraArgsObj instanceof String[] + && StringUtils.isBlank(code) + && StringUtils.isBlank(codePath)) { + String[] extraArgs = (String[]) extraArgsObj; + codePath = extraArgs[0]; + if (extraArgs.length > 1) { + runtimeMap.put( + LinkisKeys.EXTRA_ARGUMENTS, Arrays.copyOfRange(extraArgs, 1, extraArgs.length)); + } + } + + if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) { + try { + code = CliUtils.readFile(codePath); + } catch (Exception e) { + LoggerManager.getInformationLogger().error("Failed to read file", e); + throw e; + } + } + + 
executionMap.put(LinkisKeys.KEY_CODE, code); + labelMap.put(LinkisKeys.KEY_ENGINETYPE, engineType); + labelMap.put(LinkisKeys.KEY_CODETYPE, runType); + labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator); + sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath); + if (ctx.getExtraMap().containsKey(CliKeys.VERSION)) { + sourceMap.put(LinkisKeys.CLI_VERSION, ctx.getExtraMap().get(CliKeys.VERSION)); + } + runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true); + + desc.setCreator(creator); + desc.setParamConfMap(confMap); + desc.setParamRunTimeMap(runtimeMap); + desc.setParamVarsMap(varMap); + desc.setLabelMap(labelMap); + desc.setSourceMap(sourceMap); + desc.setExecutionMap(executionMap); + desc.setSubmitUser(submitUsr); + desc.setProxyUser(proxyUsr); + + return desc; + } + + public static LinkisJobOper generateOperator(CliCtx ctx) { + LinkisJobOper linkisJobOperator = new LinkisJobOper(); + linkisJobOperator.setUJESClient(UJESClientFactory.getReusable(ctx.getVarAccess())); + linkisJobOperator.setServerUrl( + ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_COMMON_GATEWAY_URL)); + return linkisJobOperator; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobResult.java new file mode 100644 index 00000000000..92999169ede --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobResult.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.interactive; + +import org.apache.linkis.cli.application.entity.job.JobResult; + +import java.util.Map; + +public class InteractiveJobResult implements JobResult { + private Boolean success; + private String message; + private Map extraMessage; + + public InteractiveJobResult(Boolean success, String message, Map extraMessage) { + this.success = success; + this.message = message; + this.extraMessage = extraMessage; + } + + @Override + public Boolean isSuccess() { + return success; + } + + @Override + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + @Override + public Map getExtraMessage() { + return extraMessage; + } + + public void setExtraMessage(Map extraMessage) { + this.extraMessage = extraMessage; + } + + public void setSuccess(Boolean success) { + this.success = success; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDesc.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDesc.java new file mode 100644 index 00000000000..99f03650043 --- /dev/null +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDesc.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.jobcmd; + +import java.util.Map; + +public class JobCmdDesc { + private String jobId; + private String user; + private JobCmdSubType subType; + + private Map params; + + public JobCmdSubType getSubType() { + return subType; + } + + public void setSubType(JobCmdSubType subType) { + this.subType = subType; + } + + public String getJobID() { + return jobId; + } + + public void setJobId(String jobId) { + this.jobId = jobId; + } + + public String getUser() { + return user; + } + + public void setUser(String user) { + this.user = user; + } + + public Map getParams() { + return params; + } + + public void setParams(Map params) { + this.params = params; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDescBuilder.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDescBuilder.java new file mode 100644 index 00000000000..1e52e6e5325 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDescBuilder.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.jobcmd; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.utils.CliUtils; + +import org.apache.commons.lang3.StringUtils; + +import java.util.HashSet; +import java.util.Set; + +public class JobCmdDescBuilder { + public static JobCmdDesc build(CliCtx ctx) { + JobCmdDesc desc = new JobCmdDesc(); + String osUser = System.getProperty(CliKeys.LINUX_USER_KEY); + String[] adminUsers = StringUtils.split(CliKeys.ADMIN_USERS, ','); + Set adminSet = new HashSet<>(); + for (String admin : adminUsers) { + adminSet.add(admin); + } + String submitUsr = CliUtils.getSubmitUser(ctx.getVarAccess(), osUser, adminSet); + + JobCmdSubType subType = null; + + String jobId = null; + if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_KILL_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_KILL_OPT); + subType = JobCmdSubType.KILL; + } else if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_STATUS_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_STATUS_OPT); + subType = JobCmdSubType.STATUS; + } else if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_DESC_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_DESC_OPT); + subType = JobCmdSubType.DESC; + } else if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_LOG_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_LOG_OPT); + subType = JobCmdSubType.LOG; + } else if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_RESULT_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_RESULT_OPT); + subType = JobCmdSubType.RESULT; + } else if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_LIST_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_LIST_OPT); + subType = JobCmdSubType.LIST; + } + 
desc.setSubType(subType); + desc.setJobId(jobId); + desc.setUser(submitUsr); + return desc; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJob.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJob.java new file mode 100644 index 00000000000..e39be38f429 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJob.java @@ -0,0 +1,212 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.jobcmd; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.JobKiller; +import org.apache.linkis.cli.application.interactor.job.common.KillResult; +import org.apache.linkis.cli.application.interactor.job.common.LogRetriever; +import org.apache.linkis.cli.application.interactor.job.common.ResultRetriever; +import org.apache.linkis.cli.application.operator.OperManager; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; +import org.apache.linkis.cli.application.present.JobInfoPresenter; +import org.apache.linkis.cli.application.present.LogPresenter; +import org.apache.linkis.cli.application.present.ResultPresenter; +import org.apache.linkis.cli.application.present.model.LinkisJobInfoModel; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; + +import java.util.HashMap; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class JobCmdJob implements Job { + private static final Logger logger = LoggerFactory.getLogger(JobCmdJob.class); + + protected CliCtx ctx; + + protected LinkisJobOper oper; + + protected JobCmdDesc desc; + + @Override + public void build(CliCtx ctx) { + this.ctx = ctx; + this.desc = 
JobCmdDescBuilder.build(ctx); + this.oper = (LinkisJobOper) OperManager.getNew(CliKeys.Linkis_OPER, ctx); + } + + @Override + public JobResult run() { + JobCmdSubType subType = desc.getSubType(); + if (!(subType instanceof JobCmdSubType)) { + throw new LinkisClientExecutionException( + "EXE0030", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "JobSubType is not instance of JobManSubType"); + } + JobCmdJobResult result = new JobCmdJobResult(true, "Execute Success!!!", new HashMap<>()); + + switch (subType) { + case STATUS: + try { + LinkisOperResultAdapter jobInfoResult = + oper.queryJobInfo(desc.getUser(), desc.getJobID()); + LinkisJobInfoModel model = new LinkisJobInfoModel(); + model.buildModel(jobInfoResult); + new JobInfoPresenter().present(model); + } catch (Exception e) { + result.setSuccess(false); + result.setMessage(ExceptionUtils.getStackTrace(e)); + } + if (!result.isSuccess()) { + LoggerManager.getPlaintTextLogger() + .error("Failed to get job-info. Message: " + result.getMessage()); + } + return result; + case LOG: + try { + // get log while running + LinkisOperResultAdapter jobInfoResult = + oper.queryJobInfo(desc.getUser(), desc.getJobID()); + LogRetriever logRetriever = + new LogRetriever( + jobInfoResult.getUser(), + jobInfoResult.getJobID(), + jobInfoResult.getStrongerExecId(), + false, + oper, + new LogPresenter()); + // async because we need to query job status + logRetriever.retrieveLogAsync(); + logRetriever.waitIncLogComplete(); + } catch (Exception e) { + result.setSuccess(false); + result.setMessage(ExceptionUtils.getStackTrace(e)); + } + if (!result.isSuccess()) { + LoggerManager.getInformationLogger() + .error("Failed to get log. 
Message: " + result.getMessage()); + } + return result; + case RESULT: + // get log while running + LinkisOperResultAdapter jobInfoResult = oper.queryJobInfo(desc.getUser(), desc.getJobID()); + // get result-set + String outputPath = + ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH); + ResultPresenter presenter; + if (StringUtils.isBlank(outputPath)) { + presenter = new ResultPresenter(); + } else { + presenter = new ResultPresenter(true, outputPath); + } + + ResultRetriever resultRetriever = + new ResultRetriever( + jobInfoResult.getUser(), + jobInfoResult.getJobID(), + jobInfoResult.getStrongerExecId(), + oper, + presenter); + + result = getResult(resultRetriever); + if (!result.isSuccess()) { + LoggerManager.getInformationLogger() + .error("Failed to get result. Message: " + result.getMessage()); + } + return result; + case KILL: + JobKiller jobKiller = new JobKiller(oper); + KillResult killResult; + try { + killResult = jobKiller.doKill(desc.getUser(), desc.getJobID()); + } catch (Exception e) { + killResult = + new KillResult( + false, + "Failed to kill job. Messgae: " + ExceptionUtils.getStackTrace(e), + new HashMap<>()); + } + if (killResult.isSuccess()) { + LoggerManager.getPlaintTextLogger().info("Kill Success. Current job-info:"); + } else { + LoggerManager.getPlaintTextLogger() + .error("Kill Failed. 
Messgae: " + killResult.getMessage() + "\n Current job-info:"); + } + try { + LinkisOperResultAdapter jobInfoResult2 = + oper.queryJobInfo(desc.getUser(), desc.getJobID()); + LinkisJobInfoModel model = new LinkisJobInfoModel(); + model.buildModel(jobInfoResult2); + new JobInfoPresenter().present(model); + } catch (Exception e) { + LoggerManager.getInformationLogger().error("Failed to get jobInfo", e); + } + return new JobCmdJobResult( + killResult.isSuccess(), killResult.getMessage(), killResult.getExtraMessage()); + // case LIST: + // break; + // case JOB_DESC: + // break; + default: + return new JobCmdJobResult( + false, "JobSubType + \"" + subType + "\" is not supported", new HashMap<>()); + } + } + + private JobCmdJobResult getResult(ResultRetriever resultRetriever) + throws LinkisClientRuntimeException { + JobCmdJobResult result = new JobCmdJobResult(true, "Execute Success!!!", new HashMap<>()); + try { + resultRetriever.retrieveResultSync(); + result.setSuccess(true); + result.setMessage("execute success!!!"); + } catch (LinkisClientExecutionException e) { + if (e.getCode().equals("EXE0037")) { + result.setSuccess(true); + result.setMessage("execute success!!!"); + LoggerManager.getInformationLogger().warn(e.getMessage()); + } else { + result.setSuccess(false); + result.setMessage("execute failed!!!\n" + ExceptionUtils.getStackTrace(e)); + } + resultRetriever.setResultFin(); // inform listener to stop + } catch (Exception e) { + result.setSuccess(false); + result.setMessage("execute failed!!!\n" + ExceptionUtils.getStackTrace(e)); + resultRetriever.setResultFin(); // inform listener to stop + } + return result; + } + + @Override + public void onDestroy() {} +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJobResult.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJobResult.java new file mode 100644 index 00000000000..c5b9b435359 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJobResult.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.jobcmd; + +import org.apache.linkis.cli.application.entity.job.JobResult; + +import java.util.Map; + +public class JobCmdJobResult implements JobResult { + private Boolean success; + private String message; + private Map extraMessage; + + public JobCmdJobResult(Boolean success, String message, Map extraMessage) { + this.success = success; + this.message = message; + this.extraMessage = extraMessage; + } + + @Override + public Boolean isSuccess() { + return success; + } + + @Override + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + @Override + public Map getExtraMessage() { + return extraMessage; + } + + public void setExtraMessage(Map extraMessage) { + this.extraMessage = extraMessage; + } + + public void setSuccess(Boolean success) { + this.success = success; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdSubType.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdSubType.java new file mode 100644 index 00000000000..b2d298995aa --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdSubType.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.jobcmd; + +public enum JobCmdSubType { + KILL("kill"), + LOG("log"), + DESC("desc"), + STATUS("status"), + LIST("list"), + RESULT("result"); + + private String name; + + JobCmdSubType(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/LinkisOnceJob.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/LinkisOnceJob.java new file mode 100644 index 00000000000..fac387998ee --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/LinkisOnceJob.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.once; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.operator.OperManager; +import org.apache.linkis.cli.application.operator.once.OnceJobOper; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; +import org.apache.linkis.cli.application.utils.SchedulerManager; + +import java.util.HashMap; +import java.util.concurrent.CountDownLatch; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LinkisOnceJob implements Job { + + private static final Logger logger = LoggerFactory.getLogger(LinkisOnceJob.class); + + private Boolean isAsync = false; + private OnceJobOper oper; + + @Override + public void build(CliCtx ctx) { + this.isAsync = + ctx.getVarAccess().getVarOrDefault(Boolean.class, CliKeys.LINKIS_CLIENT_ASYNC_OPT, false); + oper = (OnceJobOper) OperManager.getNew(CliKeys.LINKIS_ONCE, ctx); + } + + @Override + public JobResult run() { + StringBuilder infoBuilder = new StringBuilder(); + infoBuilder.append("connecting to linkis gateway:").append(oper.getServerUrl()); + LoggerManager.getInformationLogger().info(infoBuilder.toString()); + + /** submit */ + oper.submit(); + JobStatus jobStatus = oper.getStatus(); + infoBuilder.setLength(0); + infoBuilder.append("JobId:").append(oper.getJobID()).append(System.lineSeparator()); + LoggerManager.getPlaintTextLogger().info(infoBuilder.toString()); + if (isAsync && jobStatus != null && jobStatus.isJobSubmitted()) { + return new OnceJobResult(true, "Submit 
Success!!!", new HashMap<>()); + } + + /** getLog */ + CountDownLatch latch = new CountDownLatch(1); + try { + Thread logConsumer = new Thread(() -> ProcessLog(latch), "Log-Consumer"); + SchedulerManager.getCachedThreadPoolExecutor().execute(logConsumer); + } catch (Exception e) { + logger.warn("Failed to retrieve log", e); + } + + /** wait complete */ + oper.waitForComplete(); + try { + latch.await(); + } catch (Exception e) { + // ignore + } + + JobStatus finalStatus = oper.getStatus(); + + if (finalStatus.isJobSuccess()) { + return new OnceJobResult(true, "Execute Success!!!", new HashMap<>()); + } else { + return new OnceJobResult(false, "Execute Failure!!!", new HashMap<>()); + } + } + + @Override + public void onDestroy() { + oper.kill(); + } + + private void ProcessLog(CountDownLatch latch) { + while (!oper.isLogFin()) { + String log = oper.getCurrentLog(); + LoggerManager.getPlaintTextLogger().info(log); + CliUtils.doSleepQuietly(2000l); + } + latch.countDown(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/OnceJobResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/OnceJobResult.java new file mode 100644 index 00000000000..2a14f76a6f4 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/OnceJobResult.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.once; + +import org.apache.linkis.cli.application.entity.job.JobResult; + +import java.util.Map; + +public class OnceJobResult implements JobResult { + private Boolean success; + private String message; + private Map extraMessage; + + public OnceJobResult(Boolean success, String message, Map extraMessage) { + this.success = success; + this.message = message; + this.extraMessage = extraMessage; + } + + @Override + public Boolean isSuccess() { + return success; + } + + @Override + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + @Override + public Map getExtraMessage() { + return extraMessage; + } + + public void setExtraMessage(Map extraMessage) { + this.extraMessage = extraMessage; + } + + public void setSuccess(Boolean success) { + this.success = success; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJob.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJob.java new file mode 100644 index 00000000000..599f97904f9 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJob.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.version; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import java.util.HashMap; +import java.util.Map; + +public class VersionJob implements Job { + private CliCtx ctx; + + @Override + public void build(CliCtx cliCtx) { + this.ctx = cliCtx; + } + + @Override + public JobResult run() { + String version = (String) ctx.getExtraMap().get(CliKeys.VERSION); + Map extraMap = new HashMap<>(); + extraMap.put(CliKeys.VERSION, version); + LoggerManager.getPlaintTextLogger().info("Version=" + version); + return new VersionJobResult(true, "ok", extraMap); + } + + @Override + public void onDestroy() {} +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJobResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJobResult.java new file mode 100644 index 00000000000..e2f12cd7c28 --- 
/dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJobResult.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.version; + +import org.apache.linkis.cli.application.entity.job.JobResult; + +import java.util.Map; + +public class VersionJobResult implements JobResult { + private Boolean success; + private String message; + private Map extraMsg; + + public VersionJobResult(Boolean success, String message, Map extraMsg) { + this.success = success; + this.message = message; + this.extraMsg = extraMsg; + } + + @Override + public Boolean isSuccess() { + return success; + } + + @Override + public String getMessage() { + return message; + } + + @Override + public Map getExtraMessage() { + return extraMsg; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/properties/ClientProperties.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/ClientProperties.java similarity index 95% rename 
from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/properties/ClientProperties.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/ClientProperties.java index 00c36a3032e..6d165eeed68 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/properties/ClientProperties.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/ClientProperties.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.cli.common.entity.properties; +package org.apache.linkis.cli.application.interactor.properties; import java.util.HashMap; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropertiesLoader.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropertiesLoader.java new file mode 100644 index 00000000000..d66cfe605d7 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropertiesLoader.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.properties; + +import org.apache.linkis.cli.application.exception.PropsException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.properties.reader.PropertiesReader; + +import java.util.*; + +public class PropertiesLoader { + Map readersMap; + + public PropertiesLoader() { + this.readersMap = new HashMap<>(); + } + + public PropertiesLoader setPropertiesReaders(PropertiesReader[] readers) { + this.readersMap = new HashMap<>(); + for (PropertiesReader reader : readers) { + readersMap.put(reader.getPropsId(), reader); + } + return this; + } + + public PropertiesLoader addPropertiesReader(PropertiesReader reader) { + if (reader != null) { + readersMap.put(reader.getPropsId(), reader); + } + return this; + } + + public PropertiesLoader addPropertiesReaders(PropertiesReader[] readers) { + if (readers != null && readers.length > 0) { + for (PropertiesReader reader : readers) { + readersMap.put(reader.getPropsId(), reader); + } + } + return this; + } + + public void removePropertiesReader(String identifier) { + readersMap.remove(identifier); + } + + public ClientProperties[] loadProperties() { + checkInit(); + List propsList = new ArrayList<>(); + PropertiesReader readerTmp; + for (Map.Entry entry : readersMap.entrySet()) { + readerTmp = entry.getValue(); + Properties props = readerTmp.getProperties(); + ClientProperties clientProperties = new 
ClientProperties(); + clientProperties.putAll(props); + clientProperties.setPropsId(readerTmp.getPropsId()); + clientProperties.setPropertiesSourcePath(readerTmp.getPropsPath()); + propsList.add(clientProperties); + } + return propsList.toArray(new ClientProperties[propsList.size()]); + } + + public void checkInit() { + if (readersMap == null || readersMap.size() == 0) { + throw new PropsException( + "PRP0003", + ErrorLevel.ERROR, + CommonErrMsg.PropsLoaderInitErr, + "properties loader is not inited because it contains no reader"); + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropsFilesScanner.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropsFilesScanner.java similarity index 81% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropsFilesScanner.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropsFilesScanner.java index 2bd120e5ca4..8e343d09482 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropsFilesScanner.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropsFilesScanner.java @@ -15,14 +15,14 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.properties; +package org.apache.linkis.cli.application.interactor.properties; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.PropsException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropsFileReader; +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.exception.PropsException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.properties.reader.PropertiesReader; +import org.apache.linkis.cli.application.interactor.properties.reader.PropsFileReader; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; @@ -51,7 +51,7 @@ public List getPropsFiles(String rootPath) { try { files = (List) - FileUtils.listFiles(new File(rootPath), CommonConstants.CONFIG_EXTENSION, false); + FileUtils.listFiles(new File(rootPath), CliConstants.CONFIG_EXTENSION, false); } catch (Exception e) { throw new PropsException( "PRP0005", diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropertiesReader.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropertiesReader.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropertiesReader.java rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropertiesReader.java index 3c86f1b1c36..65bc2d1e06d 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropertiesReader.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropertiesReader.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.properties.reader; +package org.apache.linkis.cli.application.interactor.properties.reader; import java.util.Properties; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropsFileReader.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropsFileReader.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropsFileReader.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropsFileReader.java index 024a83311e4..d94b64eb621 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropsFileReader.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropsFileReader.java @@ -15,11 +15,11 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.properties.reader; +package org.apache.linkis.cli.application.interactor.properties.reader; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PropsException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.PropsException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import org.apache.commons.lang3.StringUtils; @@ -73,7 +73,9 @@ public Properties getProperties() { "PRP0002", ErrorLevel.ERROR, CommonErrMsg.PropsReaderErr, "Source: " + propsPath, e); } finally { try { - in.close(); + if (null != in) { + in.close(); + } } catch (Exception ignore) { // ignore } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/InteractiveDescValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/InteractiveDescValidator.java new file mode 100644 index 00000000000..03eef756129 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/InteractiveDescValidator.java @@ -0,0 +1,256 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.validate; + +import org.apache.linkis.cli.application.constants.LinkisKeys; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.interactive.InteractiveJobDesc; + +import org.apache.commons.lang3.StringUtils; + +import java.util.Map; + +public class InteractiveDescValidator { + + public void doValidation(InteractiveJobDesc desc) throws LinkisClientRuntimeException { + boolean ok = true; + StringBuilder reasonSb = new StringBuilder(); + if (StringUtils.isBlank(desc.getSubmitUser())) { + reasonSb.append("Submit User cannot be empty or blank").append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank(desc.getProxyUser())) { + reasonSb + .append("proxy(execute) User cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } + if (desc.getLabelMap() == null) { + reasonSb.append("labelMap cannot be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getExecutionMap() == null) { + reasonSb.append("ExecutionMap cannot be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getSourceMap() == null) { + reasonSb.append("SourceMap cannot be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getParamConfMap() == null) { + 
reasonSb.append("startupMap cannot be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getParamVarsMap() == null) { + reasonSb.append("variableMap cannot be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getParamRunTimeMap() == null) { + reasonSb.append("runTimeMap cannot be null").append(System.lineSeparator()); + ok = false; + } + for (Map.Entry entry : desc.getExecutionMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("ExecutionMap key cannot contains space character. key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + } + for (Map.Entry entry : desc.getLabelMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("LabelMap key cannot contains space character. key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + Object val = entry.getValue(); + if (val instanceof String) { + if (StringUtils.contains((String) val, " ")) { + reasonSb + .append("LabelMap value cannot contains space character. key: ") + .append(entry.getKey()) + .append("value: ") + .append(val) + .append(System.lineSeparator()); + ok = false; + } + } + } + for (Map.Entry entry : desc.getParamConfMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("startUpMap key cannot contains space character. key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + // Object val = entry.getValue(); + // if (val instanceof String) { + // if (StringUtils.contains((String) val, " ")) { + // reasonSb.append("startUpMap value cannot contains space character. 
+ // key: ") + // .append(entry.getKey()).append("value: ").append(val) + // .append(System.lineSeparator()); + // ok = false; + // } + // } + } + // for (Map.Entry entry : linkisJob.getParamRunTimeMap().entrySet()) + // { + // if (StringUtils.contains(entry.getKey(), " ")) { + // reasonSb.append("runtimeMap key cannot contains space character. key: + // ").append(entry.getKey()).append(System.lineSeparator()); + // ok = false; + // } + // Object val = entry.getValue(); + // if (val instanceof String) { + // if (StringUtils.contains((String) val, " ")) { + // reasonSb.append("runtimeMap value cannot contains space character. + // key: ") + // .append(entry.getKey()).append("value: ").append(val) + // .append(System.lineSeparator()); + // ok = false; + // } + // } + // } + for (Map.Entry entry : desc.getParamVarsMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("variablesMap key cannot contains space character. key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + Object val = entry.getValue(); + // if (val instanceof String) { + // if (StringUtils.contains((String) val, " ")) { + // reasonSb.append("variablesMap value cannot contains space + // character. key: ") + // .append(entry.getKey()).append("value: ").append(val) + // .append(System.lineSeparator()); + // ok = false; + // } + // } + } + for (Map.Entry entry : desc.getSourceMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("sourceMap key cannot contains space character. key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + Object val = entry.getValue(); + if (val instanceof String) { + if (StringUtils.contains((String) val, " ")) { + reasonSb + .append("sourceMap value cannot contains space character. 
key: ") + .append(entry.getKey()) + .append("value: ") + .append(val) + .append(System.lineSeparator()); + ok = false; + } + } + } + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE))) { + reasonSb + .append(LinkisKeys.KEY_ENGINETYPE) + .append(" cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_CODETYPE))) { + reasonSb + .append(LinkisKeys.KEY_CODETYPE) + .append(" cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank((String) desc.getSourceMap().get(LinkisKeys.KEY_SCRIPT_PATH))) { + reasonSb + .append(LinkisKeys.KEY_SCRIPT_PATH) + .append(" cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank((String) desc.getExecutionMap().get(LinkisKeys.KEY_CODE)) + && StringUtils.indexOfIgnoreCase( + (String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE), "sqoop") + == -1) { + reasonSb + .append(LinkisKeys.KEY_CODE) + .append(" cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR))) { + reasonSb + .append(LinkisKeys.KEY_USER_CREATOR) + .append(" cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } else { + String userCreator = (String) desc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR); + if (StringUtils.indexOf(desc.getProxyUser(), "-") != -1) { + reasonSb + .append("\'proxyUser\' should not contain special character \'-\'") + .append(System.lineSeparator()); + ok = false; + } else { + int idx = StringUtils.indexOf(userCreator, "-"); + if (idx == -1) { + reasonSb + .append(LinkisKeys.KEY_USER_CREATOR) + .append("should contain exactly one character \'-\'") + .append(System.lineSeparator()); + ok = false; + } else { + String user = StringUtils.substring(userCreator, 0, idx); + String creator = 
StringUtils.substring(userCreator, idx + 1); + if (StringUtils.isBlank(user) || StringUtils.isBlank(creator)) { + reasonSb.append("user or creator should not be blank").append(System.lineSeparator()); + ok = false; + } else { + // String forBiddenChars = "~!$%^&*-,./?|{}[]:;'()+="; + String forBiddenChars = "-"; + if (StringUtils.containsAny(creator, forBiddenChars)) { + reasonSb + .append("\'creator\' should not contain any special characters except \'_\'") + .append(System.lineSeparator()); + ok = false; + } + } + } + } + } + if (!ok) { + throw new ValidateException( + "VLD0008", + ErrorLevel.ERROR, + CommonErrMsg.ValidationErr, + "LinkisJob validation failed. Reason: " + reasonSb.toString()); + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/JobCmdDescValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/JobCmdDescValidator.java new file mode 100644 index 00000000000..95b95d5833c --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/JobCmdDescValidator.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.validate; + +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.jobcmd.JobCmdDesc; + +import org.apache.commons.lang3.StringUtils; + +public class JobCmdDescValidator { + public void doValidation(JobCmdDesc desc) throws LinkisClientRuntimeException { + boolean ok = true; + StringBuilder reasonSb = new StringBuilder(); + if (StringUtils.isBlank(desc.getJobID())) { + reasonSb.append("jobId cannot be empty or blank").append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank(desc.getUser())) { + reasonSb.append("user cannot be empty or blank").append(System.lineSeparator()); + ok = false; + } + if (!ok) { + throw new ValidateException( + "VLD0008", + ErrorLevel.ERROR, + CommonErrMsg.ValidationErr, + "LinkisJobMan validation failed. Reason: " + reasonSb.toString()); + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/OnceDescValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/OnceDescValidator.java new file mode 100644 index 00000000000..f62cafe0541 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/OnceDescValidator.java @@ -0,0 +1,278 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.validate; + +import org.apache.linkis.cli.application.constants.LinkisKeys; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.operator.once.OnceJobDesc; + +import org.apache.commons.lang3.StringUtils; + +import java.util.Map; + +public class OnceDescValidator { + public void doValidation(OnceJobDesc desc) throws LinkisClientRuntimeException { + boolean ok = true; + StringBuilder reasonSb = new StringBuilder(); + if (StringUtils.isBlank(desc.getSubmitUser())) { + reasonSb.append("Submit User cannot be empty or blank").append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank(desc.getProxyUser())) { + reasonSb + .append("proxy(execute) User cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } + if (desc.getLabelMap() == null) { + reasonSb.append("labelMap cannot be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getExecutionMap() == null) { + reasonSb.append("ExecutionMap cannot 
be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getSourceMap() == null) { + reasonSb.append("SourceMap cannot be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getParamConfMap() == null) { + reasonSb.append("startupMap cannot be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getParamVarsMap() == null) { + reasonSb.append("variableMap cannot be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getParamRunTimeMap() == null) { + reasonSb.append("runTimeMap cannot be null").append(System.lineSeparator()); + ok = false; + } + if (desc.getJobContentMap() == null) { + reasonSb.append("jobContentMap cannot be null").append(System.lineSeparator()); + ok = false; + } + for (Map.Entry entry : desc.getExecutionMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("ExecutionMap key cannot contains space character. key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + } + for (Map.Entry entry : desc.getLabelMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("LabelMap key cannot contains space character. key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + Object val = entry.getValue(); + if (val instanceof String) { + if (StringUtils.contains((String) val, " ")) { + reasonSb + .append("LabelMap value cannot contains space character. key: ") + .append(entry.getKey()) + .append("value: ") + .append(val) + .append(System.lineSeparator()); + ok = false; + } + } + } + for (Map.Entry entry : desc.getParamConfMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("startUpMap key cannot contains space character. 
key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + // Object val = entry.getValue(); + // if (val instanceof String) { + // if (StringUtils.contains((String) val, " ")) { + // reasonSb.append("startUpMap value cannot contains space character. + // key: ") + // .append(entry.getKey()).append("value: ").append(val) + // .append(System.lineSeparator()); + // ok = false; + // } + // } + } + // for (Map.Entry entry : linkisJob.getParamRunTimeMap().entrySet()) + // { + // if (StringUtils.contains(entry.getKey(), " ")) { + // reasonSb.append("runtimeMap key cannot contains space character. key: + // ").append(entry.getKey()).append(System.lineSeparator()); + // ok = false; + // } + // Object val = entry.getValue(); + // if (val instanceof String) { + // if (StringUtils.contains((String) val, " ")) { + // reasonSb.append("runtimeMap value cannot contains space character. + // key: ") + // .append(entry.getKey()).append("value: ").append(val) + // .append(System.lineSeparator()); + // ok = false; + // } + // } + // } + for (Map.Entry entry : desc.getParamVarsMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("variablesMap key cannot contains space character. key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + Object val = entry.getValue(); + // if (val instanceof String) { + // if (StringUtils.contains((String) val, " ")) { + // reasonSb.append("variablesMap value cannot contains space + // character. key: ") + // .append(entry.getKey()).append("value: ").append(val) + // .append(System.lineSeparator()); + // ok = false; + // } + // } + } + for (Map.Entry entry : desc.getSourceMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("sourceMap key cannot contains space character. 
key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + Object val = entry.getValue(); + if (val instanceof String) { + if (StringUtils.contains((String) val, " ")) { + reasonSb + .append("sourceMap value cannot contains space character. key: ") + .append(entry.getKey()) + .append("value: ") + .append(val) + .append(System.lineSeparator()); + ok = false; + } + } + } + for (Map.Entry entry : desc.getJobContentMap().entrySet()) { + if (StringUtils.contains(entry.getKey(), " ")) { + reasonSb + .append("jobContentMap key cannot contains space character. key: ") + .append(entry.getKey()) + .append(System.lineSeparator()); + ok = false; + } + // Object val = entry.getValue(); + // if (val instanceof String) { + // if (StringUtils.contains((String) val, " ")) { + // reasonSb.append("jobContentMap value cannot contains space + // character. key: ") + // .append(entry.getKey()).append("value: ").append(val) + // .append(System.lineSeparator()); + // ok = false; + // } + // } + } + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE))) { + reasonSb + .append(LinkisKeys.KEY_ENGINETYPE) + .append(" cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_CODETYPE))) { + reasonSb + .append(LinkisKeys.KEY_CODETYPE) + .append(" cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank((String) desc.getSourceMap().get(LinkisKeys.KEY_SCRIPT_PATH))) { + reasonSb + .append(LinkisKeys.KEY_SCRIPT_PATH) + .append(" cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank((String) desc.getExecutionMap().get(LinkisKeys.KEY_CODE)) + && StringUtils.indexOfIgnoreCase( + (String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE), "sqoop") + == -1) { + reasonSb + .append(LinkisKeys.KEY_CODE) + .append(" cannot be empty or blank") + 
.append(System.lineSeparator()); + ok = false; + } + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR))) { + reasonSb + .append(LinkisKeys.KEY_USER_CREATOR) + .append(" cannot be empty or blank") + .append(System.lineSeparator()); + ok = false; + } else { + String userCreator = (String) desc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR); + if (StringUtils.indexOf(desc.getProxyUser(), "-") != -1) { + reasonSb + .append("\'proxyUser\' should not contain special character \'-\'") + .append(System.lineSeparator()); + ok = false; + } else { + int idx = StringUtils.indexOf(userCreator, "-"); + if (idx == -1) { + reasonSb + .append(LinkisKeys.KEY_USER_CREATOR) + .append("should contain exactly one character \'-\'") + .append(System.lineSeparator()); + ok = false; + } else { + String user = StringUtils.substring(userCreator, 0, idx); + String creator = StringUtils.substring(userCreator, idx + 1); + if (StringUtils.isBlank(user) || StringUtils.isBlank(creator)) { + reasonSb.append("user or creator should not be blank").append(System.lineSeparator()); + ok = false; + } else { + // String forBiddenChars = "~!$%^&*-,./?|{}[]:;'()+="; + String forBiddenChars = "-"; + if (StringUtils.containsAny(creator, forBiddenChars)) { + reasonSb + .append("\'creator\' should not contain any special characters except \'_\'") + .append(System.lineSeparator()); + ok = false; + } + } + } + } + } + if (!ok) { + throw new ValidateException( + "VLD0008", + ErrorLevel.ERROR, + CommonErrMsg.ValidationErr, + "LinkisJob validation failed. 
Reason: " + reasonSb.toString()); + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParamValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParamValidator.java similarity index 75% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParamValidator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParamValidator.java index abbc6959763..b54dd6b723d 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParamValidator.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParamValidator.java @@ -15,14 +15,12 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.validate; +package org.apache.linkis.cli.application.interactor.validate; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; -public class ParamValidator implements Validator { - @Override +public class ParamValidator { public void doValidation(Object input) throws LinkisClientRuntimeException { if (!(input instanceof Params)) { // TODO:throw diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParsedTplValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParsedTplValidator.java new file mode 100644 index 00000000000..a7ea05c72b7 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParsedTplValidator.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.validate; + +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; + +import java.text.MessageFormat; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 1. Check if there is missing or unknown option. 2. Call checkParam method for command-specific + * validation. + */ +public class ParsedTplValidator { + private static final Logger logger = LoggerFactory.getLogger(ParsedTplValidator.class); + + public void doValidation(CmdTemplate parsedTemplateCopy) throws CommandException { + + String msg = "start validating command \"{0}\", template \"{1}\""; + logger.info( + MessageFormat.format( + msg, parsedTemplateCopy.getCmdType().getName(), parsedTemplateCopy.getCmdType())); + + checkOptions(parsedTemplateCopy); + + logger.info("Start params-check"); + parsedTemplateCopy.checkParams(); + logger.info("params-check ok."); + } + + /** Validation */ + private void checkOptions(CmdTemplate template) throws CommandException { + List> options = template.getOptions(); + for (CmdOption cmdOption : options) { + if (!cmdOption.hasVal() && !cmdOption.isOptional()) { + throw new ValidateException( + "VLD0003", + ErrorLevel.ERROR, + CommonErrMsg.ValidationErr, + "CmdOption value cannot be empty: paramName:" + + cmdOption.getParamName() + + "CmdType: " + + template.getCmdType()); + } + } + } +} diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java similarity index 75% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java index 8c0653cb4f4..9e5a9a64576 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java @@ -18,28 +18,18 @@ package org.apache.linkis.cli.application.interactor.validate; import org.apache.linkis.cli.application.constants.LinkisConstants; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import org.apache.linkis.cli.application.operator.ujes.UJESClientContext; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; import org.apache.commons.lang3.StringUtils; -public class UJESContextValidator 
implements Validator { - @Override - public void doValidation(Object input) throws LinkisClientRuntimeException { - if (!(input instanceof UJESClientContext)) { - throw new ValidateException( - "VLD0009", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Input of UJESContextValidator is not instance of UjesClientDriverContext"); - } +public class UJESContextValidator { + public void doValidation(UJESClientContext context) throws LinkisClientRuntimeException { boolean ok = true; StringBuilder reasonSb = new StringBuilder(); - UJESClientContext context = (UJESClientContext) input; if (StringUtils.isBlank(context.getGatewayUrl())) { reasonSb.append("gatewayUrl cannot be empty or blank").append(System.lineSeparator()); ok = false; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/var/VarAccessImpl.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/var/VarAccessImpl.java new file mode 100644 index 00000000000..9a871828a34 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/var/VarAccessImpl.java @@ -0,0 +1,325 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.var; + +import org.apache.linkis.cli.application.entity.command.ParamItem; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.exception.VarAccessException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.SpecialMap; +import org.apache.linkis.cli.application.interactor.command.template.converter.AbstractStringConverter; +import org.apache.linkis.cli.application.interactor.command.template.converter.PredefinedStringConverters; +import org.apache.linkis.cli.application.interactor.properties.ClientProperties; + +import org.apache.commons.lang3.StringUtils; + +import java.util.*; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class VarAccessImpl implements VarAccess { + private static Logger logger = LoggerFactory.getLogger(VarAccessImpl.class); + private Params cmdParams; + private ClientProperties userConf; + private ClientProperties defaultConf; + private Map subMapCache; + + public VarAccessImpl setCmdParams(Params cmdParams) { + this.cmdParams = cmdParams; + return this; + } + + public Params getSubParam(String identifier) { + return this.cmdParams; + } + + public VarAccessImpl setUserConf(ClientProperties userConf) { + this.userConf = userConf; + return this; + } + + public ClientProperties getUserConf(String identifier) { + return this.userConf; + } + + public VarAccessImpl setDefaultConf(ClientProperties defaultConf) { + this.defaultConf = defaultConf; + return this; + } + + public ClientProperties getDefaultConf(String identifier) { + return this.defaultConf; + } + + public VarAccessImpl init() { + 
this.subMapCache = new HashMap<>(); + putSubMapCache(subMapCache, cmdParams); + return this; + } + + private void putSubMapCache(Map subMapCache, Params param) { + for (ParamItem item : param.getParamItemMap().values()) { + // scan through all map type value and try get value for key + if (item.getValue() != null + && item.hasVal() + && item.getValue() instanceof Map + && !(item.getValue() instanceof SpecialMap)) { + try { + Map subMap = (Map) item.getValue(); + for (Map.Entry entry : subMap.entrySet()) { + if (subMapCache.containsKey(item.getKey())) { + logger.warn( + "Value of duplicated key \"{}\" in subMap \"{}\" will be ignored.", + item.getKey(), + item.getKey()); + } else if (StringUtils.isNotBlank(entry.getKey()) + && StringUtils.isNotBlank(entry.getValue())) { + subMapCache.put(entry.getKey(), entry.getValue()); + } + } + } catch (ClassCastException e) { + logger.warn( + "Param: {} has an unsupported Map type(not Map). It wiil be ignored", + item.getKey()); + } + } + } + } + + public void checkInit() { + if (this.cmdParams == null || this.defaultConf == null || this.subMapCache == null) { + throw new VarAccessException( + "VA0002", + ErrorLevel.ERROR, + CommonErrMsg.VarAccessInitErr, + "stdVarAccess is not inited. " + + "cmdParams: " + + cmdParams + + "defaultConf: " + + defaultConf + + "subMapCache: " + + subMapCache); + } + } + + @Override + public T getVarOrDefault(Class clazz, String key, T defaultValue) { + + if (StringUtils.isBlank(key)) { + return null; + } + + T val = getVar(clazz, key); + + return val != null ? val : defaultValue; + } + + @Override + public T getVar(Class clazz, String key) { + checkInit(); + if (key == null || StringUtils.isBlank(key)) { + return null; + } + T p1 = getVarFromParam(clazz, key, cmdParams); + + T pd1 = getDefaultVarFromParam(clazz, key, cmdParams); + + T c1 = getVarFromCfg(clazz, key, userConf); + T c2 = getVarFromCfg(clazz, key, defaultConf); + + return p1 != null ? p1 : c1 != null ? c1 : c2 != null ? 
c2 : pd1; + } + + private T getVarFromParam(Class clazz, String key, Params params) { + if (params == null || StringUtils.isBlank(key)) { + return null; + } + + Object v1 = + params.getParamItemMap().containsKey(key) && params.getParamItemMap().get(key).hasVal() + ? setNullIfEmpty(params.getParamItemMap().get(key).getValue()) + : null; + + Object v2 = setNullIfEmpty(convertStringVal(clazz, subMapCache.getOrDefault(key, null))); + + // extraParam has lower priority + Object v3 = + params.getExtraProperties() == null + ? null + : setNullIfEmpty(params.getExtraProperties().getOrDefault(key, null)); + + Object retObj = v1 != null ? v1 : v2 != null ? v2 : v3; + + return clazz.cast(retObj); + } + + private boolean paramHasVar(String key, Params params) { + boolean b1 = + params.getParamItemMap().containsKey(key) && params.getParamItemMap().get(key).hasVal(); + boolean b2 = subMapCache.containsKey(key); + boolean b3 = params.getExtraProperties().containsKey(key); + return b1 || b2 || b3; + } + + private T getDefaultVarFromParam(Class clazz, String key, Params params) { + if (params == null || StringUtils.isBlank(key) || !params.getParamItemMap().containsKey(key)) { + return null; + } + + Object vd = setNullIfEmpty(params.getParamItemMap().get(key).getDefaultValue()); + + return clazz.cast(vd); + } + + private T getVarFromCfg(Class clazz, String key, ClientProperties conf) { + + if (conf == null) { + return null; + } + Object val = conf.get(key); + if (val == null) { + return null; + } + String strVal; + try { + strVal = (String) val; + } catch (ClassCastException e) { + throw new VarAccessException( + "VA0003", + ErrorLevel.ERROR, + CommonErrMsg.VarAccessErr, + "Cannot getVar \"" + key + "\" from config. 
Cause: value is not String"); + } + + return convertStringVal(clazz, strVal); + } + + private Object setNullIfEmpty(Object obj) { + Object ret; + if (obj instanceof String && StringUtils.isBlank((String) obj)) { + ret = null; + } else if (obj instanceof Map && ((Map) obj).size() == 0) { + ret = null; + } else if (obj instanceof Collections && ((Collection) obj).size() == 0) { + ret = null; + } else { + ret = obj; + } + return ret; + } + + private boolean cfgHasVar(String key, ClientProperties conf) { + return conf == null ? false : conf.containsKey(key); + } + + private T convertStringVal(Class clazz, String strVal) { + Object ret; + if (StringUtils.isBlank(strVal)) { + return null; + } + if (clazz == Object.class) { + ret = strVal; + } else if (clazz == String.class) { + ret = convertGivenConverter(strVal, PredefinedStringConverters.NO_CONVERTER); + } else if (clazz == Integer.class) { + ret = convertGivenConverter(strVal, PredefinedStringConverters.INT_CONVERTER); + } else if (clazz == Long.class) { + ret = convertGivenConverter(strVal, PredefinedStringConverters.LONG_CONVERTER); + } else if (clazz == Boolean.class) { + ret = convertGivenConverter(strVal, PredefinedStringConverters.BOOLEAN_CONVERTER); + } else if (Map.class.isAssignableFrom(clazz)) { + // TODO: throw or return null if not string map + ret = null; + // convertGivenConverter(strVal, + // PredefinedStringConverters.STRING_MAP_CONVERTER); + } else if (clazz == String[].class) { + ret = null; + // ret = convertGivenConverter(strVal, + // PredefinedStringConverters.STR_ARRAY_CONVERTER); + } else { + throw new VarAccessException( + "VA0004", + ErrorLevel.ERROR, + CommonErrMsg.VarAccessErr, + "Cannot convertStringVal \"" + + strVal + + "\" to " + + clazz.getCanonicalName() + + ": designated type is not supported"); + } + return clazz.cast(ret); + } + + private T convertGivenConverter(String strVal, AbstractStringConverter converter) { + return converter.convert(strVal); + } + + @Override + public 
String[] getAllVarKeys() { + List varKeys = new ArrayList<>(); + + addParamVarKeys(varKeys, cmdParams); + + addPropsVarKeys(varKeys, userConf); + addPropsVarKeys(varKeys, defaultConf); + + return varKeys.toArray(new String[varKeys.size()]); + } + + private void addParamVarKeys(List varKeys, Params param) { + if (param != null) { + for (String key : param.getParamItemMap().keySet()) { + if (!varKeys.contains(key)) { + varKeys.add(key); + } + } + for (String key : subMapCache.keySet()) { + // scan through all map type value and try add key + if (!varKeys.contains(key)) { + varKeys.add(key); + } + } + for (String key : param.getExtraProperties().keySet()) { + if (!varKeys.contains(key)) { + varKeys.add(key); + } + } + } + } + + private void addPropsVarKeys(List varKeys, ClientProperties props) { + if (props != null) { + for (Object key : props.keySet()) { + if (!varKeys.contains(key)) { + varKeys.add((String) key); + } + } + } + } + + @Override + public boolean hasVar(String key) { + boolean b1 = paramHasVar(key, cmdParams); + boolean b2 = cfgHasVar(key, userConf); + boolean b3 = cfgHasVar(key, defaultConf); + return b1 || b2 || b3; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/FetchResultEvent.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/FetchResultEvent.java new file mode 100644 index 00000000000..d538a40dff9 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/FetchResultEvent.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.observer.event; + +public class FetchResultEvent extends SingleObserverEvent {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/LinkisClientEvent.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/LinkisClientEvent.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/LinkisClientEvent.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/LinkisClientEvent.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/LogStartEvent.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/LogStartEvent.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/LogStartEvent.java rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/LogStartEvent.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/SingleObserverEvent.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/SingleObserverEvent.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/SingleObserverEvent.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/SingleObserverEvent.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/listener/LinkisClientListener.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/listener/LinkisClientListener.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/listener/LinkisClientListener.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/listener/LinkisClientListener.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/JobOperBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/JobOperBuilder.java new file mode 100644 index 00000000000..0e1604b204e --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/JobOperBuilder.java @@ -0,0 
+1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.operator; + +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.operator.JobOper; + +public interface JobOperBuilder { + JobOper build(CliCtx ctx); +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/OperManager.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/OperManager.java new file mode 100644 index 00000000000..a41bfd5f9f8 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/OperManager.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.operator; + +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.operator.JobOper; + +import java.util.HashMap; +import java.util.Map; + +public class OperManager { + + private static Map builderMap = new HashMap<>(); + + public static void register(String name, JobOperBuilder builder) { + builderMap.put(name, builder); + } + + public static void remove(String name) { + builderMap.remove(name); + } + + public static JobOper getNew(String name, CliCtx ctx) { + JobOperBuilder builder = builderMap.get(name); + if (builder == null) { + return null; + } + return builder.build(ctx); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/once/LinkisNodeStatus.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/LinkisNodeStatus.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/once/LinkisNodeStatus.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/LinkisNodeStatus.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobConstants.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobConstants.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobConstants.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobConstants.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobDesc.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobDesc.java new file mode 100644 index 00000000000..2bb63c0e0e0 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobDesc.java @@ -0,0 +1,124 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.operator.once; + +import org.apache.linkis.cli.application.entity.var.VarAccess; + +import java.util.Map; + +public class OnceJobDesc { + private VarAccess varAccess; + private String submitUser; + private String proxyUser; + private String creator; + private Map executionMap; + private Map paramConfMap; + private Map paramRunTimeMap; + private Map paramVarsMap; + private Map labelMap; + private Map sourceMap; + private Map jobContentMap; + + public VarAccess getVarAccess() { + return varAccess; + } + + public void setVarAccess(VarAccess varAccess) { + this.varAccess = varAccess; + } + + public String getSubmitUser() { + return submitUser; + } + + public void setSubmitUser(String submitUser) { + this.submitUser = submitUser; + } + + public String getProxyUser() { + return proxyUser; + } + + public void setProxyUser(String proxyUser) { + this.proxyUser = proxyUser; + } + + public String getCreator() { + return creator; + } + + public void setCreator(String creator) { + this.creator = creator; + } + + public Map getParamConfMap() { + return paramConfMap; + } + + public void setParamConfMap(Map paramConfMap) { + this.paramConfMap = paramConfMap; + } + + public Map getParamRunTimeMap() { + return paramRunTimeMap; + } + + public void setParamRunTimeMap(Map paramRunTimeMap) { + this.paramRunTimeMap = paramRunTimeMap; + } + + public Map getExecutionMap() { + return executionMap; + } + + public void setExecutionMap(Map executionMap) { + this.executionMap = executionMap; + } + + public Map getParamVarsMap() { + return paramVarsMap; + } + + public void setParamVarsMap(Map paramVarsMap) { + this.paramVarsMap = paramVarsMap; + } + + public Map getSourceMap() { + return sourceMap; + } + + public void setSourceMap(Map sourceMap) { + this.sourceMap = sourceMap; + } + + public Map getLabelMap() { + return labelMap; + } + + public void setLabelMap(Map labelMap) { + this.labelMap = labelMap; + } + + public Map getJobContentMap() { + 
return jobContentMap; + } + + public void setJobContentMap(Map jobContentMap) { + this.jobContentMap = jobContentMap; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobOper.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobOper.java new file mode 100644 index 00000000000..28ee2d5112f --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobOper.java @@ -0,0 +1,167 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.operator.once; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.constants.LinkisKeys; +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.entity.operator.JobOper; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.LinkisJobStatus; +import org.apache.linkis.cli.application.operator.ujes.UJESClientFactory; +import org.apache.linkis.computation.client.LinkisJobBuilder$; +import org.apache.linkis.computation.client.once.simple.SimpleOnceJob; +import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder; +import org.apache.linkis.computation.client.once.simple.SubmittableSimpleOnceJob; +import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator; +import org.apache.linkis.computation.client.operator.impl.EngineConnLogs; + +import org.apache.commons.lang3.StringUtils; + +public class OnceJobOper implements JobOper { + + EngineConnLogOperator logOperator = null; + private SimpleOnceJob onceJob; + private String serverUrl; + private String engineTypeForECM; + private Boolean isLogFin = false; + + public void init(OnceJobDesc desc) { + + VarAccess varAccess = desc.getVarAccess(); + + serverUrl = varAccess.getVar(String.class, CliKeys.LINKIS_COMMON_GATEWAY_URL); + + LinkisJobBuilder$.MODULE$.setDefaultClientConfig( + UJESClientFactory.generateDWSClientConfig(varAccess)); + LinkisJobBuilder$.MODULE$.setDefaultUJESClient(UJESClientFactory.getReusable(varAccess)); + + String engineTypeRaw = (String) 
desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE); + engineTypeForECM = engineTypeRaw; + + if (StringUtils.isNotBlank(engineTypeRaw)) { + engineTypeForECM = StringUtils.split(engineTypeRaw, "-")[0]; + } else { + engineTypeForECM = ""; + } // TODO: remove parsing and let server side parse engineType + + onceJob = + new SimpleOnceJobBuilder() + .setCreateService(CliConstants.LINKIS_CLI) + .addExecuteUser(desc.getProxyUser()) + .setStartupParams(desc.getParamConfMap()) + .setLabels(desc.getLabelMap()) + .setRuntimeParams(desc.getParamRunTimeMap()) + .setSource(desc.getSourceMap()) + .setVariableMap(desc.getParamVarsMap()) + .setJobContent(desc.getJobContentMap()) + .build(); + } + + public String getServerUrl() { + return serverUrl; + } + + public SimpleOnceJob getOnceJob() { + return onceJob; + } + + public void setOnceJob(SimpleOnceJob onceJob) { + this.onceJob = onceJob; + } + + private void panicIfNull(Object obj) { + if (obj == null) { + throw new LinkisClientExecutionException( + "EXE0040", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "Instance of is null"); + } + } + + public void submit() { + panicIfNull(onceJob); + if (!(onceJob instanceof SubmittableSimpleOnceJob)) { + throw new LinkisClientExecutionException( + "EXE0041", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "onceJob is not properly initiated"); + } + ((SubmittableSimpleOnceJob) onceJob).submit(); + } + + public void kill() { + panicIfNull(onceJob); + if (!getStatus().isJobFinishedState()) { + onceJob.kill(); + } + } + + public String getJobID() { + return onceJob.getId(); + } + + public String getUser() { + return "TODO"; + } + + public JobStatus getStatus() { + panicIfNull(onceJob); + String status = onceJob.getStatus(); + return LinkisJobStatus.convertFromNodeStatusString(status); + } + + public void waitForComplete() { + panicIfNull(onceJob); + onceJob.waitForCompleted(); + } + + public String getCurrentLog() { + panicIfNull(onceJob); + if (logOperator == null) { + logOperator = + 
(EngineConnLogOperator) onceJob.getOperator(EngineConnLogOperator.OPERATOR_NAME()); + logOperator.setECMServiceInstance( + ((SubmittableSimpleOnceJob) onceJob).getECMServiceInstance()); + logOperator.setEngineConnType(engineTypeForECM); + // logOperator.setPageSize(OnceJobConstants.MAX_LOG_SIZE_ONCE); + logOperator.setIgnoreKeywords(OnceJobConstants.LOG_IGNORE_KEYWORDS); + } + EngineConnLogs logs = + (EngineConnLogs) logOperator.apply(); // for some reason we have to add type conversion, + // otherwise mvn testCompile fails + StringBuilder logBuilder = new StringBuilder(); + for (String log : logs.logs()) { + logBuilder.append(log).append(System.lineSeparator()); + } + String status = onceJob.getStatus(); + LinkisJobStatus jobStatus = LinkisJobStatus.convertFromNodeStatusString(status); + if ((logs.logs() == null || logs.logs().size() <= 0) && jobStatus.isJobFinishedState()) { + isLogFin = true; + } + return logBuilder.toString(); + // System.out.println(logs.logs().size()); + } + + public Boolean isLogFin() { + return isLogFin; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceOperBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceOperBuilder.java new file mode 100644 index 00000000000..906fc03406b --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceOperBuilder.java @@ -0,0 +1,170 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.linkis.cli.application.operator.once;

import org.apache.linkis.cli.application.constants.CliConstants;
import org.apache.linkis.cli.application.constants.CliKeys;
import org.apache.linkis.cli.application.constants.LinkisKeys;
import org.apache.linkis.cli.application.entity.context.CliCtx;
import org.apache.linkis.cli.application.entity.operator.JobOper;
import org.apache.linkis.cli.application.interactor.job.common.KeyParser;
import org.apache.linkis.cli.application.operator.JobOperBuilder;
import org.apache.linkis.cli.application.utils.CliUtils;

import org.apache.commons.lang3.StringUtils;

import java.util.*;

/**
 * Builds an initialized {@link OnceJobOper} from CLI context: collects the conf / runtime / var /
 * label / source / execution / job-content maps from var-access, folds in any prefixed scalar
 * overrides, resolves users and code, and assembles the {@link OnceJobDesc}.
 */
public class OnceOperBuilder implements JobOperBuilder {
  @Override
  public JobOper build(CliCtx ctx) {

    OnceJobDesc desc = new OnceJobDesc();

    // Raw maps as parsed from command line / config; any of them may be absent (null).
    Map confMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_PARAM_CONF);
    Map runtimeMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_PARAM_RUNTIME);
    Map varMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_PARAM_VAR);
    Map labelMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_LABEL);
    Map sourceMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_SOURCE);
    Map executionMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_EXEC);
    Map jobContentMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_CONTENT);

    confMap = confMap == null ? new HashMap<>() : confMap;
    runtimeMap = runtimeMap == null ? new HashMap<>() : runtimeMap;
    varMap = varMap == null ? new HashMap<>() : varMap;
    labelMap = labelMap == null ? new HashMap<>() : labelMap;
    sourceMap = sourceMap == null ? new HashMap<>() : sourceMap;
    executionMap = executionMap == null ? new HashMap<>() : executionMap;
    jobContentMap = jobContentMap == null ? new HashMap<>() : jobContentMap;

    // Normalize map keys by stripping the CLI key prefixes.
    confMap = KeyParser.removePrefixForKeysInMap(confMap);
    runtimeMap = KeyParser.removePrefixForKeysInMap(runtimeMap);
    labelMap = KeyParser.removePrefixForKeysInMap(labelMap);
    sourceMap = KeyParser.removePrefixForKeysInMap(sourceMap);
    executionMap = KeyParser.removePrefixForKeysInMap(executionMap);
    jobContentMap = KeyParser.removePrefixForKeysInMap(jobContentMap);

    // Fold scalar (non-map) vars whose key carries a recognized prefix into the matching map.
    for (String key : ctx.getVarAccess().getAllVarKeys()) {
      Object val = ctx.getVarAccess().getVar(Object.class, key);
      if (!(val instanceof Map) && val != null) {
        // note that we allow it to overwrite existing values in map
        if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_CONF)) {
          KeyParser.removePrefixAndPutValToMap(confMap, key, val, CliKeys.JOB_PARAM_CONF);
        } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_VAR)) {
          KeyParser.removePrefixAndPutValToMap(varMap, key, val, CliKeys.JOB_PARAM_VAR);
        } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_RUNTIME)) {
          KeyParser.removePrefixAndPutValToMap(runtimeMap, key, val, CliKeys.JOB_PARAM_RUNTIME);
        } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_EXEC)) {
          KeyParser.removePrefixAndPutValToMap(executionMap, key, val, CliKeys.JOB_EXEC);
        } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_LABEL)) {
          KeyParser.removePrefixAndPutValToMap(labelMap, key, val, CliKeys.JOB_LABEL);
        } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_SOURCE)) {
          KeyParser.removePrefixAndPutValToMap(sourceMap, key, val, CliKeys.JOB_SOURCE);
        } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_CONTENT)) {
          KeyParser.removePrefixAndPutValToMap(jobContentMap, key, val, CliKeys.JOB_CONTENT);
        } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.LINKIS_CLIENT_COMMON)) {
          // do nothing
        } else {
          // confMap.put(key, stdVarAccess.getVar(Object.class, key));
        }
      }
    }

    Boolean isAsync =
        ctx.getVarAccess().getVarOrDefault(Boolean.class, CliKeys.LINKIS_CLIENT_ASYNC_OPT, false);

    // Creator defaults differ between sync and async submission.
    String creator;
    if (!isAsync) {
      creator =
          ctx.getVarAccess()
              .getVarOrDefault(
                  String.class, CliKeys.JOB_COMMON_CREATOR, CliConstants.JOB_CREATOR_DEFAULT);
    } else {
      creator =
          ctx.getVarAccess()
              .getVarOrDefault(
                  String.class, CliKeys.JOB_COMMON_CREATOR, CliConstants.JOB_CREATOR_ASYNC_DEFAULT);
    }
    String code = ctx.getVarAccess().getVar(String.class, CliKeys.JOB_EXEC_CODE);
    String engineType = ctx.getVarAccess().getVar(String.class, CliKeys.JOB_LABEL_ENGINE_TYPE);
    String runType = ctx.getVarAccess().getVar(String.class, CliKeys.JOB_LABEL_CODE_TYPE);
    String scriptPath =
        ctx.getVarAccess()
            .getVarOrDefault(String.class, CliKeys.JOB_SOURCE_SCRIPT_PATH, "LinkisCli");

    // Resolve submit and proxy users; admins may impersonate others.
    String osUser = System.getProperty(CliKeys.LINUX_USER_KEY);
    String[] adminUsers = StringUtils.split(CliKeys.ADMIN_USERS, ',');
    Set<String> adminSet = new HashSet<>(Arrays.asList(adminUsers));
    String submitUsr = CliUtils.getSubmitUser(ctx.getVarAccess(), osUser, adminSet);
    String proxyUsr = CliUtils.getProxyUser(ctx.getVarAccess(), submitUsr, adminSet);

    String enableExecuteOnce =
        ctx.getVarAccess().getVarOrDefault(String.class, CliKeys.JOB_LABEL_EXECUTEONCE, "true");
    // default executeOnce-mode
    if (Boolean.parseBoolean(enableExecuteOnce)) {
      labelMap.put(LinkisKeys.KEY_EXECUTEONCE, "");
    } else {
      labelMap.remove(LinkisKeys.KEY_EXECUTEONCE);
    }
    String codePath = ctx.getVarAccess().getVar(String.class, CliKeys.JOB_COMMON_CODE_PATH);
    Object extraArgsObj = ctx.getVarAccess().getVar(Object.class, CliKeys.JOB_EXTRA_ARGUMENTS);
    // `instanceof` already implies non-null, so the explicit null check was redundant.
    if (extraArgsObj instanceof String[]
        && StringUtils.isBlank(code)
        && StringUtils.isBlank(codePath)) {
      String[] extraArgs = (String[]) extraArgsObj;
      codePath = extraArgs[0];
      if (extraArgs.length > 1) {
        runtimeMap.put(
            LinkisKeys.EXTRA_ARGUMENTS, Arrays.copyOfRange(extraArgs, 1, extraArgs.length));
      }
    }

    if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) {
      code = CliUtils.readFile(codePath);
    }

    executionMap.put(LinkisKeys.KEY_CODE, code);
    labelMap.put(LinkisKeys.KEY_ENGINETYPE, engineType);
    labelMap.put(LinkisKeys.KEY_CODETYPE, runType);
    labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator);
    sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath);
    runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true);

    desc.setVarAccess(ctx.getVarAccess());
    desc.setCreator(creator);
    desc.setParamConfMap(confMap);
    desc.setParamRunTimeMap(runtimeMap);
    desc.setParamVarsMap(varMap);
    desc.setLabelMap(labelMap);
    desc.setSourceMap(sourceMap);
    desc.setExecutionMap(executionMap);
    desc.setSubmitUser(submitUsr);
    desc.setProxyUser(proxyUsr);
    desc.setJobContentMap(jobContentMap);

    OnceJobOper onceJobOper = new OnceJobOper();
    onceJobOper.init(desc);

    return onceJobOper;
  }
}
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.operator.ujes; + +import org.apache.linkis.cli.application.constants.LinkisKeys; +import org.apache.linkis.cli.application.entity.operator.JobOper; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.interactive.InteractiveJobDesc; +import org.apache.linkis.cli.application.operator.ujes.result.OpenLogResult2; +import org.apache.linkis.cli.application.operator.ujes.result.ResultSetResult2; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.common.exception.LinkisException; +import org.apache.linkis.ujes.client.UJESClient; +import org.apache.linkis.ujes.client.request.JobSubmitAction; +import org.apache.linkis.ujes.client.request.OpenLogAction; +import org.apache.linkis.ujes.client.request.ResultSetAction; +import org.apache.linkis.ujes.client.response.*; + +import org.apache.commons.lang3.StringUtils; + +import java.io.IOException; +import java.text.MessageFormat; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Based on UjesClient */ +public class LinkisJobOper implements JobOper { 
+ protected UJESClient client; + private Logger logger = LoggerFactory.getLogger(LinkisJobOper.class); + private String serverUrl; + + public UJESClient getUJESClient() { + return client; + } + + public void setUJESClient(UJESClient client) { + this.client = client; + } + + public String getServerUrl() { + return serverUrl; + } + + public void setServerUrl(String serverUrl) { + this.serverUrl = serverUrl; + } + + public void close() { + if (null != client) { + try { + client.close(); + } catch (IOException e) { + logger.error("Close error. " + e.getMessage(), e); + } + } + } + + public void checkInit() throws LinkisClientRuntimeException { + if (client == null) { + throw new LinkisClientExecutionException( + "EXE0011", ErrorLevel.ERROR, CommonErrMsg.ExecutionInitErr, "UjesClientDriver is null"); + } + } + + /** + * submit Job but does not query for progress + * + * @param + * @return + */ + public LinkisOperResultAdapter submit(InteractiveJobDesc jobDesc) + throws LinkisClientRuntimeException { + checkInit(); + JobSubmitResult jobSubmitResult; + try { + JobSubmitAction jobSubmitAction = + JobSubmitAction.builder() + .setUser(jobDesc.getSubmitUser()) + .addExecuteUser(jobDesc.getProxyUser()) + .setExecutionContent(jobDesc.getExecutionMap()) + .addExecuteCode((String) jobDesc.getExecutionMap().get(LinkisKeys.KEY_CODE)) + .setStartupParams(jobDesc.getParamConfMap()) + .setRuntimeParams(jobDesc.getParamRunTimeMap()) + .setVariableMap(jobDesc.getParamVarsMap()) + .setLabels(jobDesc.getLabelMap()) + .setSource(jobDesc.getSourceMap()) + .setHeaders(jobDesc.getHeaders()) + .build(); + logger.info("Request info to Linkis: \n{}", CliUtils.GSON.toJson(jobSubmitAction)); + + /* Old API */ + // JobExecuteAction jobExecuteAction = JobExecuteAction.builder() + // .setCreator((String) + // linkisJob.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR)) + // .setUser(linkisJob.getSubmitUser()) + // .addExecuteCode((String) + // linkisJob.getExecutionMap().get(LinkisKeys.KEY_CODE)) + // 
.setEngineTypeStr((String) + // linkisJob.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE)) + // .setRunTypeStr((String) + // linkisJob.getExecutionMap().get(LinkisKeys.KEY_CODETYPE)) + // .setStartupParams(linkisJob.getParamConfMap()) + // .setVariableMap(linkisJob.getParamVarsMap()) + // .setScriptPath((String) + // linkisJob.getSourceMap().get(LinkisKeys.KEY_SCRIPT_PATH)) + // .build(); + // logger.debug("Request info to Linkis Old: \n{}", + // Utils.GSON.toJson(jobExecuteAction)); + // jobExecuteResult = client.execute(jobExecuteAction); + + jobSubmitResult = client.submit(jobSubmitAction); + logger.info("Response info from Linkis: \n{}", CliUtils.GSON.toJson(jobSubmitResult)); + + } catch (Exception e) { + // must throw if exception + throw new LinkisClientExecutionException( + "EXE0011", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "Failed to submit job", e); + } + + if (jobSubmitResult == null + || 0 != jobSubmitResult.getStatus() + || StringUtils.isBlank(jobSubmitResult.getTaskID())) { + String reason; + if (jobSubmitResult == null) { + reason = "JobSubmitResult is null"; + } else if (0 != jobSubmitResult.getStatus()) { + reason = "server returns non-zero status-code. 
"; + reason += jobSubmitResult.getMessage(); + } else { + reason = "server returns blank TaskId"; + } + String msg = MessageFormat.format("Failed to submit job, Reason: {0}", reason); + throw new LinkisClientExecutionException( + "EXE0012", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } + + return new UJESResultAdapter(jobSubmitResult); + } + + /** + * loop and get job info until we success and get a valid execID + * + * @param user + * @param taskID + * @return + */ + public LinkisOperResultAdapter queryJobInfo(String user, String taskID) + throws LinkisClientRuntimeException { + if (user == null || taskID == null) { + throw new LinkisClientExecutionException( + "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); + } + return new UJESResultAdapter(queryJobInfoInternal(user, taskID)); + } + + public LinkisOperResultAdapter queryJobStatus(String user, String taskID, String execID) + throws LinkisClientRuntimeException { + if (user == null || taskID == null || execID == null) { + throw new LinkisClientExecutionException( + "EXE0036", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "user or jobID or execID is null"); + } + checkInit(); + JobExecuteResult executeResult = new JobExecuteResult(); + executeResult.setTaskID(taskID); + executeResult.setUser(user); + executeResult.setExecID(execID); + JobStatusResult jobStatusResult = null; + int retryTime = 0; + final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; + + while (retryTime++ < MAX_RETRY_TIME) { + try { + jobStatusResult = client.status(executeResult); + logger.debug("job-status: " + CliUtils.GSON.toJson(jobStatusResult)); + if (jobStatusResult == null || 0 != jobStatusResult.getStatus()) { + String reason; + if (jobStatusResult == null) { + reason = "jobStatusResult is null"; + } else { + reason = "server returns non-zero status-code. "; + reason += jobStatusResult.getMessage(); + } + String msg = + MessageFormat.format( + "Get job status failed. 
retry time : {0}/{1}. taskID={0}, Reason: {1}", + retryTime, MAX_RETRY_TIME, taskID, reason); + + logger.debug( + "", + new LinkisClientExecutionException( + "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg)); + } else { + break; + } + } catch (Exception e) { + String msg = + MessageFormat.format( + "Get job status failed. retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); + if (e instanceof LinkisException) { + msg += " " + e.toString(); + } + logger.warn(msg, e); + if (retryTime >= MAX_RETRY_TIME) { + throw new LinkisClientExecutionException( + "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg, e); + } + } + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + } + if (jobStatusResult == null || 0 != jobStatusResult.getStatus()) { + String reason; + if (jobStatusResult == null) { + reason = "jobStatusResult is null"; + } else { + reason = "server returns non-zero status-code. "; + reason += jobStatusResult.getMessage(); + } + String msg = + MessageFormat.format( + "Get status failed. Retry exhausted. 
taskID={0}, Reason: {1}", taskID, reason); + throw new LinkisClientExecutionException( + "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } + return new UJESResultAdapter(jobStatusResult); + } + + private JobInfoResult queryJobInfoInternal(String user, String taskID) + throws LinkisClientRuntimeException { + checkInit(); + JobExecuteResult executeResult = new JobExecuteResult(); + executeResult.setTaskID(taskID); + executeResult.setUser(user); + JobInfoResult jobInfoResult = null; + int retryTime = 0; + final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; + + while (retryTime++ < MAX_RETRY_TIME) { + try { + jobInfoResult = client.getJobInfo(executeResult); + logger.debug("job-info: " + CliUtils.GSON.toJson(jobInfoResult)); + if (jobInfoResult == null || 0 != jobInfoResult.getStatus()) { + String reason; + if (jobInfoResult == null) { + reason = "JobInfoResult is null"; + } else { + reason = "server returns non-zero status-code. "; + reason += jobInfoResult.getMessage(); + } + String msg = + MessageFormat.format( + "Get job info failed. retry time : {0}/{1}. taskID={2}, Reason: {3}", + retryTime, MAX_RETRY_TIME, taskID, reason); + + logger.debug( + "", + new LinkisClientExecutionException( + "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg)); + } else { + break; + } + } catch (Exception e) { + String msg = + MessageFormat.format( + "Get job info failed. 
retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); + if (e instanceof LinkisException) { + msg += " " + e.toString(); + } + logger.warn(msg, e); + if (retryTime >= MAX_RETRY_TIME) { + throw new LinkisClientExecutionException( + "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg, e); + } + } + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + } + if (jobInfoResult == null || 0 != jobInfoResult.getStatus()) { + String reason; + if (jobInfoResult == null) { + reason = "JobInfoResult is null"; + } else { + reason = "server returns non-zero status-code. "; + reason += jobInfoResult.getMessage(); + } + String msg = + MessageFormat.format( + "Get info failed. Retry exhausted. taskID={0}, Reason: {1}", taskID, reason); + throw new LinkisClientExecutionException( + "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } + return jobInfoResult; + } + + public LinkisOperResultAdapter queryRunTimeLogFromLine( + String user, String taskID, String execID, int fromLine) throws LinkisClientRuntimeException { + checkInit(); + JobExecuteResult jobExecuteResult = new JobExecuteResult(); + jobExecuteResult.setUser(user); + jobExecuteResult.setTaskID(taskID); + jobExecuteResult.setExecID(execID); + + JobLogResult logResult = null; + int retryTime = 0; + final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; + + while (retryTime++ < MAX_RETRY_TIME) { + try { + logResult = client.log(jobExecuteResult, fromLine, UJESConstants.MAX_LOG_SIZE); + logger.debug("runtime-log-result:" + CliUtils.GSON.toJson(logResult)); + if (logResult == null || 0 != logResult.getStatus()) { + String reason; + if (logResult == null) { + reason = "JobLogResult is null"; + } else { + reason = "server returns non-zero status-code"; + reason += logResult.getMessage(); + } + String msg = + MessageFormat.format( + "Get log failed. retry time : {0}/{1}. taskID={2}. 
Reason: {3}", + retryTime, MAX_RETRY_TIME, taskID, reason); + logger.debug( + "", + new LinkisClientExecutionException( + "EXE0015", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg)); + } else { + break; + } + } catch (Exception e) { + String msg = + MessageFormat.format("Get log failed. Retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); + // logger.warn("", new LinkisClientExecutionException("EXE0016", + // ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg)); + if (retryTime >= MAX_RETRY_TIME) { + throw new LinkisClientExecutionException( + "EXE0016", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg, e); + } + } + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + } + if (logResult == null || 0 != logResult.getStatus()) { + String reason; + if (logResult == null) { + reason = "JobLogResult is null"; + } else { + reason = "server returns non-zero status-code. "; + reason += logResult.getMessage(); + } + String msg = + MessageFormat.format( + "Get log failed. Retry exhausted. 
taskID={0}, Reason: {1}", taskID, reason); + // logger.warn("", new LinkisClientExecutionException("EXE0016", + // ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg)); + throw new LinkisClientExecutionException( + "EXE0016", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } + return new UJESResultAdapter(logResult); + } + + public LinkisOperResultAdapter queryPersistedLogFromLine( + String logPath, String user, String taskID, int fromLine) { + return new UJESResultAdapter( + new OpenLogResult2(queryPersistedLogInternal(logPath, user, taskID), fromLine)); + } + + private OpenLogResult queryPersistedLogInternal(String logPath, String user, String taskID) + throws LinkisClientRuntimeException { + checkInit(); + int retryCnt = 0; + final int MAX_RETRY_TIMES = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; + OpenLogResult openLogResult = null; + + while (retryCnt++ < MAX_RETRY_TIMES) { + try { + openLogResult = + client.openLog( + OpenLogAction.newBuilder().setLogPath(logPath).setProxyUser(user).build()); + logger.debug("persisted-log-result:" + CliUtils.GSON.toJson(openLogResult)); + if (openLogResult == null + || 0 != openLogResult.getStatus() + || StringUtils.isBlank(openLogResult.getLog()[UJESConstants.IDX_FOR_LOG_TYPE_ALL])) { + String reason; + if (openLogResult == null) { + reason = "OpenLogResult is null"; + } else if (0 != openLogResult.getStatus()) { + reason = "server returns non-zero status-code. "; + reason += openLogResult.getMessage(); + } else { + reason = "server returns empty log"; + } + String msg = + MessageFormat.format( + "Get log from openLog failed. retry time : {0}/{1}. taskID={2}. Reason: {3}", + retryCnt, MAX_RETRY_TIMES, taskID, reason); + logger.debug(msg); + } else { + break; + } + } catch (Exception e) { + String msg = + MessageFormat.format( + "Get log from openLog failed. 
retry time : {0}/{1}", retryCnt, MAX_RETRY_TIMES); + if (e instanceof LinkisException) { + msg += " " + e.toString(); + } + logger.debug(msg, e); + if (retryCnt >= MAX_RETRY_TIMES) { + throw new LinkisClientExecutionException( + "EXE0017", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "Get log from openLog failed. Retry exhausted. taskID=" + taskID, + e); + } + } + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + } + if (openLogResult == null + || 0 != openLogResult.getStatus() + || StringUtils.isBlank(openLogResult.getLog()[UJESConstants.IDX_FOR_LOG_TYPE_ALL])) { + String reason; + if (openLogResult == null) { + reason = "OpenLogResult is null"; + } else if (0 != openLogResult.getStatus()) { + reason = "server returns non-zero status-code"; + } else { + reason = "server returns empty log"; + } + String msg = + MessageFormat.format( + "Get log from openLog failed. retry time : {0}/{1}. taskID={2}. Reason: {3}", + retryCnt, MAX_RETRY_TIMES, taskID, reason); + logger.debug(msg); + if (retryCnt >= MAX_RETRY_TIMES) { + msg = + MessageFormat.format( + "Get log from openLog failed. Retry exhausted. 
taskID={0}, Reason: {1}", + taskID, reason); + throw new LinkisClientExecutionException( + "EXE0017", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } + } + return openLogResult; + } + + public UJESResultAdapter queryProgress(String user, String taskID, String execId) + throws LinkisClientRuntimeException { + checkInit(); + JobExecuteResult executeResult = new JobExecuteResult(); + executeResult.setTaskID(taskID); + executeResult.setUser(user); + executeResult.setExecID(execId); + + JobProgressResult jobProgressResult = null; + int retryTime = 0; + final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; + + while (retryTime++ < MAX_RETRY_TIME) { + try { + jobProgressResult = client.progress(executeResult); + if (jobProgressResult == null || 0 != jobProgressResult.getStatus()) { + String reason; + if (jobProgressResult == null) { + reason = "JobProgressResult is null"; + } else { + reason = "server returns non-zero status-code. "; + reason += jobProgressResult.getMessage(); + } + String msg = + MessageFormat.format( + "Get progress failed. retry time : {0}/{1}. taskID={2}. Reason: {3}", + retryTime, MAX_RETRY_TIME, taskID, reason); + logger.debug(msg); + } else { + break; + } + } catch (Exception e) { + String msg = + MessageFormat.format( + "Get progress failed. retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); + if (e instanceof LinkisException) { + msg += " " + e.toString(); + } + logger.warn(msg, e); + if (retryTime >= MAX_RETRY_TIME) { + throw new LinkisClientExecutionException( + "EXE0019", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "Get progress failed. Retry exhausted. taskID=" + taskID, + e); + } + } + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + } + + if (jobProgressResult == null || 0 != jobProgressResult.getStatus()) { + String reason; + if (jobProgressResult == null) { + reason = "JobProgressResult is null"; + } else { + reason = "server returns non-zero status-code. 
"; + reason += jobProgressResult.getMessage(); + } + String msg = + MessageFormat.format( + "Get progress failed. Retry exhausted. taskID={0}, Reason: {1}", taskID, reason); + throw new LinkisClientExecutionException( + "EXE0020", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } + + return new UJESResultAdapter(jobProgressResult); + } + + public LinkisOperResultAdapter queryResultSetPaths( + String user, String taskID, String resultLocation) { + checkInit(); + + JobInfoResult jobInfoResult = queryJobInfoInternal(user, taskID); + if (null == jobInfoResult) { + String msg = "Get ResultSet Failed: Cannot get a valid jobInfo"; + logger.error(msg); + throw new LinkisClientExecutionException( + "EXE0021", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } + if (!jobInfoResult.isSucceed()) { + String msg = "Get ResultSet Failed: job Status is not \"Succeed\", ."; + throw new LinkisClientExecutionException( + "EXE0021", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } + + if (StringUtils.isBlank(jobInfoResult.getRequestPersistTask().getResultLocation())) { + // sometimes server-side does not return this + jobInfoResult.getRequestPersistTask().setResultLocation(resultLocation); + } + + if (StringUtils.isBlank(jobInfoResult.getRequestPersistTask().getResultLocation())) { + throw new LinkisClientExecutionException( + "EXE0021", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "ResultLocation is blank."); + } + + String[] resultSetArray = null; + + int retryTime = 0; + final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; + + while (retryTime++ < MAX_RETRY_TIME) { + try { + resultSetArray = jobInfoResult.getResultSetList(client); // this makes call to server + if (resultSetArray == null || 0 == resultSetArray.length) { + String reason; + if (resultSetArray == null) { + reason = "array is null"; + } else { + reason = "array length is zero"; + } + String msg = + MessageFormat.format( + "Get resultSetArray failed. retry time : {0}/{1}. 
taskID={2} Reason: {3}", + retryTime, MAX_RETRY_TIME, taskID, reason); + logger.debug(msg); + break; + } + } catch (Exception e) { + String msg = + MessageFormat.format( + "Get resultSetArray failed. retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); + if (e instanceof LinkisException) { + msg += " " + e.toString(); + } + logger.warn(msg, e); + if (retryTime >= MAX_RETRY_TIME) { + throw new LinkisClientExecutionException( + "EXE0022", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "Get resultSetArray failed. Retry exhausted. taskID=" + taskID, + e); + } + } + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + } + if (resultSetArray == null || 0 == resultSetArray.length) { + String reason; + if (resultSetArray == null) { + reason = "array is null"; + } else { + reason = "array length is zero"; + } + String msg = + MessageFormat.format( + "Get resultSetArray failed. retry exhausted. taskID={0}. Reason: {1}", + taskID, reason); + logger.warn(msg); + } + return new UJESResultAdapter(resultSetArray); + } + + public LinkisOperResultAdapter queryResultSetGivenResultSetPath( + String[] resultSetPaths, int idxResultSet, String user, Integer page, Integer pageSize) { + checkInit(); + int retryTime = 0; + final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; + ResultSetResult result = null; + String resultSetPath = resultSetPaths[idxResultSet]; + while (retryTime++ < MAX_RETRY_TIME) { + try { + ResultSetAction action = + ResultSetAction.builder() + .setPath(resultSetPath) + .setUser(user) + .setPage(page) + .setPageSize(pageSize) + .build(); + result = client.resultSet(action); + logger.debug("resultset-result:" + CliUtils.GSON.toJson(result)); + if (result == null || 0 != result.getStatus()) { + String reason; + if (result == null) { + reason = "array is null"; + } else { + reason = "server returns non-zero status-code. "; + reason += result.getMessage(); + } + String msg = + MessageFormat.format( + "Get resultSet failed. 
retry time : {0}/{1}. path={2}, Reason: {3}", + retryTime, MAX_RETRY_TIME, resultSetPath, reason); + logger.debug(msg); + } else { + break; + } + } catch (Exception e) { + String msg = + MessageFormat.format( + "Get resultSet failed. retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); + if (e instanceof LinkisException) { + msg += " " + e.toString(); + } + logger.warn(msg, e); + if (retryTime >= MAX_RETRY_TIME) { + throw new LinkisClientExecutionException( + "EXE0024", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "Get resultSet failed. Retry exhausted. path=" + resultSetPath, + e); + } + } + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + } + if (result == null || 0 != result.getStatus()) { + String reason; + if (result == null) { + reason = "ResultSetResult is null"; + } else { + reason = "server returns non-zero status-code. "; + reason += result.getMessage(); + } + String msg = + MessageFormat.format( + "Get resultSet failed. Retry exhausted. Path={0}, Reason: {1}", + resultSetPath, reason); + throw new LinkisClientExecutionException( + "EXE0024", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } + return new UJESResultAdapter(new ResultSetResult2(idxResultSet, result)); + } + + public LinkisOperResultAdapter kill(String user, String taskId, String execId) + throws LinkisClientRuntimeException { + checkInit(); + + int retryTime = 0; + final int MAX_RETRY_TIME = UJESConstants.DRIVER_REQUEST_MAX_RETRY_TIME; + + JobKillResult result = null; + + while (retryTime++ < MAX_RETRY_TIME) { + try { + JobExecuteResult killRequest = new JobExecuteResult(); + killRequest.setUser(user); + killRequest.setTaskID(taskId); + killRequest.setExecID(execId); + result = client.kill(killRequest); + logger.debug("job-kill-result:" + CliUtils.GSON.toJson(result)); + if (result == null || 0 != result.getStatus()) { + String reason; + if (result == null) { + reason = "result is null"; + } else { + reason = "server returns non-zero status-code. 
"; + reason += result.getMessage(); + } + String msg = + MessageFormat.format( + "Kill job failed. retry time : {0}/{1}. taskId={2}, Reason: {3}", + retryTime, MAX_RETRY_TIME, taskId, reason); + logger.debug(msg); + } else { + break; + } + } catch (Exception e) { + String msg = + MessageFormat.format( + "Kill job failed. retry time : {0}/{1}", retryTime, MAX_RETRY_TIME); + if (e instanceof LinkisException) { + msg += " " + e.toString(); + } + logger.warn(msg, e); + if (retryTime >= MAX_RETRY_TIME) { + throw new LinkisClientExecutionException( + "EXE0025", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "Kill job failed. taskId={0} Retry exhausted.", + taskId, + e); + } + } + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + } + if (result == null || 0 != result.getStatus()) { + String reason; + if (result == null) { + reason = "result is null"; + } else { + reason = "server returns non-zero status-code. "; + reason += result.getMessage(); + } + String msg = + MessageFormat.format( + "Kill job failed. Retry exhausted. taskId={0}, Reason: {1}", taskId, reason); + throw new LinkisClientExecutionException( + "EXE0025", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } + return new UJESResultAdapter(result); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperBuilder.java new file mode 100644 index 00000000000..318688c5655 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperBuilder.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.operator.ujes; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.operator.JobOper; +import org.apache.linkis.cli.application.operator.JobOperBuilder; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LinkisOperBuilder implements JobOperBuilder { + private static Logger logger = LoggerFactory.getLogger(LinkisOperBuilder.class); + + @Override + public JobOper build(CliCtx ctx) { + LinkisJobOper jobOper = new LinkisJobOper(); + jobOper.setUJESClient(UJESClientFactory.getReusable(ctx.getVarAccess())); + jobOper.setServerUrl( + ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_COMMON_GATEWAY_URL)); + return jobOper; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java similarity index 90% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java index 457a7ba56d5..bd22444a2d6 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java @@ -17,8 +17,8 @@ package org.apache.linkis.cli.application.operator.ujes; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultSet; -import org.apache.linkis.cli.common.entity.job.JobStatus; +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.interactor.job.common.ResultSet; import java.util.Date; @@ -52,7 +52,7 @@ public interface LinkisOperResultAdapter { Boolean hasNextLogLine(); - LinkisResultSet getResultContent(); + ResultSet getResultContent(); Boolean resultHasNextPage(); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientContext.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientContext.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientContext.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientContext.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java new file mode 100644 index 00000000000..23135199fe4 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java @@ -0,0 +1,173 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.operator.ujes; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.constants.LinkisConstants; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.exception.BuilderException; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.validate.UJESContextValidator; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.httpclient.authentication.AuthenticationStrategy; +import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy; +import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy; +import org.apache.linkis.httpclient.dws.config.DWSClientConfig; +import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder; +import org.apache.linkis.ujes.client.UJESClient; +import org.apache.linkis.ujes.client.UJESClientImpl; + +import org.apache.commons.lang3.StringUtils; + +import java.util.concurrent.TimeUnit; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class UJESClientFactory { + private static Logger logger = LoggerFactory.getLogger(UJESClientFactory.class); + + private static UJESClient client; + + public static UJESClient getReusable(VarAccess stdVarAccess) { + if (client == null) { // NOSONAR + synchronized (UJESClientFactory.class) { // NOSONAR + if (client == null) { // NOSONAR + client = getNew(stdVarAccess); + } + } + } + return client; + } + + public static UJESClient getNew(VarAccess stdVarAccess) { + try { + DWSClientConfig config = generateDWSClientConfig(stdVarAccess); + UJESClient ret = new UJESClientImpl(config); + logger.info("Linkis ujes client inited."); + return ret; + } 
catch (Exception e) { + throw new LinkisClientExecutionException( + "EXE0010", ErrorLevel.ERROR, CommonErrMsg.ExecutionInitErr, "Cannot init UJESClient", e); + } + } + + public static DWSClientConfig generateDWSClientConfig(VarAccess stdVarAccess) { + UJESClientContext context = generateContext(stdVarAccess); + try { + AuthenticationStrategy authenticationStrategy; + if (StringUtils.isBlank(context.getAuthenticationStrategyStr()) + || !LinkisConstants.AUTH_STRATEGY_TOKEN.equalsIgnoreCase( + context.getAuthenticationStrategyStr())) { + authenticationStrategy = + new StaticAuthenticationStrategy(); // this has to be newed here otherwise + // log-in fails for static + } else { + authenticationStrategy = new TokenAuthenticationStrategy(); + } + + DWSClientConfigBuilder builder = DWSClientConfigBuilder.newBuilder(); + DWSClientConfig config = + ((DWSClientConfigBuilder) + (builder + .addServerUrl(context.getGatewayUrl()) + .connectionTimeout(30000) + .discoveryEnabled(false) + .discoveryFrequency(1, TimeUnit.MINUTES) + .loadbalancerEnabled(false) + .maxConnectionSize(5) + .retryEnabled(false) + .readTimeout(context.getReadTimeoutMills()) + .setAuthenticationStrategy(authenticationStrategy) + .setAuthTokenKey(context.getTokenKey()) + .setAuthTokenValue(context.getTokenValue()))) + .setDWSVersion(context.getDwsVersion()) + .build(); + + logger.info("Linkis ujes client inited."); + return config; + } catch (Exception e) { + throw new LinkisClientExecutionException( + "EXE0010", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionInitErr, + "Cannot init DWSClientConfig", + e); + } + } + + private static UJESClientContext generateContext(VarAccess stdVarAccess) { + String gatewayUrl = stdVarAccess.getVar(String.class, CliKeys.LINKIS_COMMON_GATEWAY_URL); + if (StringUtils.isBlank(gatewayUrl)) { + throw new BuilderException( + "BLD0007", + ErrorLevel.ERROR, + CommonErrMsg.BuilderBuildErr, + "Cannot build UjesClientDriverContext: gatewayUrl is empty"); + } + + String authKey = 
stdVarAccess.getVar(String.class, CliKeys.LINKIS_COMMON_TOKEN_KEY); + String authValue = stdVarAccess.getVar(String.class, CliKeys.LINKIS_COMMON_TOKEN_VALUE); + + String authenticationStrategy = + stdVarAccess.getVarOrDefault( + String.class, + CliKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, + LinkisConstants.AUTH_STRATEGY_STATIC); + + long connectionTimeout = + stdVarAccess.getVarOrDefault( + Long.class, CliKeys.UJESCLIENT_COMMON_CONNECTT_TIMEOUT, 30000L); + boolean discoveryEnabled = + stdVarAccess.getVarOrDefault( + Boolean.class, CliKeys.UJESCLIENT_COMMON_DISCOVERY_ENABLED, false); + boolean loadBalancerEnabled = + stdVarAccess.getVarOrDefault( + Boolean.class, CliKeys.UJESCLIENT_COMMON_LOADBALANCER_ENABLED, true); + int maxConnectionSize = + stdVarAccess.getVarOrDefault( + Integer.class, CliKeys.UJESCLIENT_COMMON_MAX_CONNECTION_SIZE, 5); + boolean retryEnabled = + stdVarAccess.getVarOrDefault(Boolean.class, CliKeys.UJESCLIENT_COMMON_RETRY_ENABLED, false); + long readTimeout = + stdVarAccess.getVarOrDefault(Long.class, CliKeys.UJESCLIENT_COMMON_READTIMEOUT, 30000L); + String dwsVersion = + stdVarAccess.getVarOrDefault(String.class, CliKeys.UJESCLIENT_COMMON_DWS_VERSION, "v1"); + + UJESClientContext context = new UJESClientContext(); + + context.setGatewayUrl(gatewayUrl); + context.setAuthenticationStrategyStr(authenticationStrategy); + context.setTokenKey(authKey); + context.setTokenValue(authValue); + context.setConnectionTimeout(connectionTimeout); + context.setDiscoveryEnabled(discoveryEnabled); + context.setLoadBalancerEnabled(loadBalancerEnabled); + context.setMaxConnectionSize(maxConnectionSize); + context.setRetryEnabled(retryEnabled); + context.setReadTimeoutMills(readTimeout); + context.setDwsVersion(dwsVersion); + + logger.info("==========UJES_CTX============\n" + CliUtils.GSON.toJson(context)); + UJESContextValidator ctxValidator = new UJESContextValidator(); + ctxValidator.doValidation(context); + return context; + } +} diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java index 845949079f5..6166f7b2fef 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java @@ -40,7 +40,4 @@ public class UJESConstants { public static final Integer IDX_FOR_LOG_TYPE_ALL = 3; // 0: Error 1: WARN 2:INFO 3: ALL public static final int DEFAULT_PAGE_SIZE = 500; - - public static final String DEFAULT_SPARK_ENGINE = "spark-2.4.3"; - public static final String DEFAULT_HIVE_ENGINE = "hive-1.2.1"; } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java similarity index 90% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java index d402e01fe8c..75582c3ef40 
100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java @@ -18,17 +18,16 @@ package org.apache.linkis.cli.application.operator.ujes; import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisJobStatus; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultSet; -import org.apache.linkis.cli.application.operator.OperatorUtils; +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.LinkisJobStatus; +import org.apache.linkis.cli.application.interactor.job.common.ResultSet; import org.apache.linkis.cli.application.operator.ujes.result.OpenLogResult2; import org.apache.linkis.cli.application.operator.ujes.result.ResultSetResult2; import org.apache.linkis.cli.application.operator.ujes.result.UJESResult; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.utils.CliUtils; import org.apache.linkis.httpclient.dws.response.DWSResult; import org.apache.linkis.ujes.client.request.UserAction; import org.apache.linkis.ujes.client.response.JobInfoResult; @@ -163,6 +162,10 @@ public String getStrongerExecId() { return null; } String execId = null; + + if 
(result instanceof JobSubmitResult) { + execId = ((JobSubmitResult) result).getExecID(); + } if (result instanceof JobInfoResult) { if (result != null && ((JobInfoResult) result).getTask() != null @@ -170,7 +173,7 @@ public String getStrongerExecId() { execId = (String) ((JobInfoResult) result).getTask().get(LinkisKeys.KEY_STRONGER_EXECID); } } - if (Utils.isValidExecId(execId)) { + if (CliUtils.isValidExecId(execId)) { return execId; } return null; @@ -182,8 +185,7 @@ public Float getJobProgress() { return null; } if (result instanceof JobInfoResult) { - if (((JobInfoResult) result).getRequestPersistTask() != null - && ((JobInfoResult) result).getRequestPersistTask() != null) { + if (((JobInfoResult) result).getRequestPersistTask() != null) { return ((JobInfoResult) result).getRequestPersistTask().getProgress(); } } @@ -277,8 +279,7 @@ public String getLog() { ((OpenLogResult2) result).getResult().getLog()[UJESConstants.IDX_FOR_LOG_TYPE_ALL]; Integer fromLine = ((OpenLogResult2) result).getFromLine(); return StringUtils.substring( - allLog, - OperatorUtils.getFirstIndexSkippingLines(allLog, fromLine == null ? 0 : fromLine)); + allLog, getFirstIndexSkippingLines(allLog, fromLine == null ? 
0 : fromLine)); } return null; } @@ -294,7 +295,7 @@ public Integer getNextLogLine() { if (result instanceof OpenLogResult2 && ((OpenLogResult2) result).getResult() != null && ((OpenLogResult2) result).getResult().getLog() != null) { - return OperatorUtils.getNumOfLines( + return getNumOfLines( ((OpenLogResult2) result).getResult().getLog()[UJESConstants.IDX_FOR_LOG_TYPE_ALL]); } return null; @@ -315,7 +316,7 @@ public Boolean hasNextLogLine() { } @Override - public LinkisResultSet getResultContent() { + public ResultSet getResultContent() { if (result == null) { return null; } @@ -323,7 +324,7 @@ public LinkisResultSet getResultContent() { && ((ResultSetResult2) result).getResultSetResult() != null && ((ResultSetResult2) result).getResultSetResult().getFileContent() != null && ((ResultSetResult2) result).getResultSetResult().getMetadata() != null) { - LinkisResultSet ret = new LinkisResultSet(); + ResultSet ret = new ResultSet(); ret.setResultsetIdx(((ResultSetResult2) result).getIdxResultSet()); if (((ResultSetResult2) result).getResultSetResult().getMetadata() != null) { ret.setResultMeta( @@ -560,4 +561,54 @@ public String getRequestApplicationName() { } return null; } + + private int getNumOfLines(String str) { + if (str == null || str.length() == 0) { + return 0; + } + int lines = 1; + int len = str.length(); + for (int pos = 0; pos < len; pos++) { + char c = str.charAt(pos); + if (c == '\r') { + lines++; + if (pos + 1 < len && str.charAt(pos + 1) == '\n') { + pos++; + } + } else if (c == '\n') { + lines++; + } + } + return lines; + } + + private int getFirstIndexSkippingLines(String str, Integer lines) { + if (str == null || str.length() == 0 || lines < 0) { + return -1; + } + if (lines == 0) { + return 0; + } + + int curLineIdx = 0; + int len = str.length(); + for (int pos = 0; pos < len; pos++) { + char c = str.charAt(pos); + if (c == '\r') { + curLineIdx++; + if (pos + 1 < len && str.charAt(pos + 1) == '\n') { + pos++; + } + } else if (c == '\n') { + 
curLineIdx++; + } else { + continue; + } + + if (curLineIdx >= lines) { + return pos + 1; + } + } + return -1; + } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/OpenLogResult2.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/OpenLogResult2.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/OpenLogResult2.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/OpenLogResult2.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/ResultSetResult2.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/ResultSetResult2.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/ResultSetResult2.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/ResultSetResult2.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/UJESResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/UJESResult.java similarity index 100% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/UJESResult.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/UJESResult.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/HelpPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/HelpPresenter.java new file mode 100644 index 00000000000..f02be7fefc3 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/HelpPresenter.java @@ -0,0 +1,105 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.present; + +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.entity.present.Presenter; +import org.apache.linkis.cli.application.exception.PresenterException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.template.option.MapOption; +import org.apache.linkis.cli.application.interactor.command.template.option.Parameter; +import org.apache.linkis.cli.application.interactor.command.template.option.StdOption; +import org.apache.linkis.cli.application.present.model.HelpInfoModel; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import java.util.ArrayList; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class HelpPresenter implements Presenter { + private static Logger logger = LoggerFactory.getLogger(HelpPresenter.class); + + @Override + public void present(Model model) { + if (!(model instanceof HelpInfoModel)) { + throw new PresenterException( + "PST0010", + ErrorLevel.ERROR, + CommonErrMsg.PresenterErr, + "Input for HelpInfoPresenter is not instance of model"); + } + + HelpInfoModel helpInfoModel = (HelpInfoModel) model; + + String helpInfo = getUsage(helpInfoModel.getTemplate()); + + LoggerManager.getPlaintTextLogger().info(helpInfo); + } + + /** Help info for sub-command */ + private String getUsage(CmdTemplate template) { + StringBuilder sb = new StringBuilder(); + List> options = template.getOptions(); + List> stdOptions = new ArrayList<>(); + List> parameters = new ArrayList<>(); + List> mapOptions = new ArrayList<>(); + for (CmdOption o : options) { + if (o instanceof StdOption) { + stdOptions.add(o); + } 
else if (o instanceof Parameter) { + parameters.add(o); + } else if (o instanceof MapOption) { + mapOptions.add(o); + } + } + + sb.append("Usage: ") + .append(template.getCmdType().getName()) + .append(options.size() > 0 ? " [OPTIONS] " : " "); + for (CmdOption p : parameters) { + if (p instanceof Parameter) { + sb.append(((Parameter) p).repr()).append(" "); + } + } + if (!"".equals(template.getCmdType().getDesc())) { + sb.append("\n\t").append(template.getCmdType().getDesc()); + } + + sb.append(options.size() > 0 ? "\nOptions:\n" : "\n"); + for (CmdOption o : stdOptions) { + sb.append(o.toString()).append("\n"); + } + + sb.append(options.size() > 0 ? "\nMapOptions:\n" : "\n"); + for (CmdOption o : mapOptions) { + sb.append(o.toString()).append("\n"); + } + + sb.append(parameters.size() > 0 ? "Parameters:\n" : "\n"); + for (CmdOption p : parameters) { + sb.append(p.toString()).append("\n"); + } + + return sb.toString(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/JobInfoPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/JobInfoPresenter.java new file mode 100644 index 00000000000..3a264f827c0 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/JobInfoPresenter.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.present; + +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.entity.present.Presenter; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +public class JobInfoPresenter implements Presenter { + @Override + public void present(Model model) { + LoggerManager.getPlaintTextLogger().info(CliUtils.GSON.toJson(model)); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/LogPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/LogPresenter.java new file mode 100644 index 00000000000..9710be95db9 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/LogPresenter.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.present; + +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.entity.present.Presenter; +import org.apache.linkis.cli.application.exception.PresenterException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; +import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; +import org.apache.linkis.cli.application.present.model.LinkisLogModel; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.StringUtils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LogPresenter implements Presenter, LinkisClientListener { + private static Logger logger = LoggerFactory.getLogger(LogPresenter.class); + + @Override + public void update(LinkisClientEvent event, Object msg) { + Model model = new LinkisLogModel(); + model.buildModel(msg); + this.present(model); + } + + @Override + public void present(Model model) { + if (!(model instanceof LinkisLogModel)) { + throw new PresenterException( + "PST0001", + ErrorLevel.ERROR, + CommonErrMsg.PresenterErr, + "Input model for \"LinkisLogPresenter\" is not instance of \"LinkisJobIncLogModel\""); + } + LinkisLogModel logModel = (LinkisLogModel) model; + while (!logModel.logFinReceived()) { + String incLog = 
logModel.consumeLog(); + if (StringUtils.isNotEmpty(incLog)) { + LoggerManager.getPlaintTextLogger().info(incLog); + } + CliUtils.doSleepQuietly(500l); + } + String incLog = logModel.consumeLog(); + if (StringUtils.isNotEmpty(incLog)) { + LoggerManager.getPlaintTextLogger().info(incLog); + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/ResultPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/ResultPresenter.java new file mode 100644 index 00000000000..3c7281a7eb7 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/ResultPresenter.java @@ -0,0 +1,301 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.present; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.entity.present.Presenter; +import org.apache.linkis.cli.application.exception.PresenterException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.ResultSet; +import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; +import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; +import org.apache.linkis.cli.application.present.file.ResultFileWriter; +import org.apache.linkis.cli.application.present.model.LinkisResultModel; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.StringUtils; + +import java.text.MessageFormat; +import java.util.*; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ResultPresenter implements Presenter, LinkisClientListener { + private static Logger logger = LoggerFactory.getLogger(ResultPresenter.class); + private Boolean writeToFile = false; + private String filePath = ""; + + public ResultPresenter() {} + + public ResultPresenter(Boolean writeToFile, String filePath) { + this.writeToFile = writeToFile; + this.filePath = filePath; + } + + @Override + public void update(LinkisClientEvent event, Object msg) { + Model model = new LinkisResultModel(); + model.buildModel(msg); + this.present(model); + } + + @Override + public void present(Model model) { + if (!(model instanceof LinkisResultModel)) { + throw new PresenterException( + "PST0001", + ErrorLevel.ERROR, + CommonErrMsg.PresenterErr, + "Input model for \"LinkisResultPresenter\" is not instance of \"LinkisResultModel\""); + } + 
LinkisResultModel resultModel = (LinkisResultModel) model; + + LoggerManager.getPlaintTextLogger().info(formatResultIndicator(resultModel)); + + if (!resultModel.getJobStatus().isJobSuccess()) { + LoggerManager.getInformationLogger() + .info("JobStatus is not \'success\'. Will not retrieve result-set."); + return; + } + String msg = ""; + if (resultModel.hasResult()) { + msg = + "Retrieving result-set, may take time if result-set is large, please do not exit program."; + } else { + msg = "Your job has no result."; + } + LoggerManager.getInformationLogger().info(msg); + + int preIdx = -1; + StringBuilder resultSb = new StringBuilder(); + + while (!resultModel.resultFinReceived()) { + preIdx = presentOneIteration(resultModel, preIdx, resultSb); + CliUtils.doSleepQuietly(500l); + } + presentOneIteration(resultModel, preIdx, resultSb); + + if (writeToFile) { + LoggerManager.getInformationLogger() + .info("ResultSet has been successfully written to path: " + filePath); + } + } + + protected int presentOneIteration( + LinkisResultModel resultModel, int preIdx, StringBuilder resultSb) { + List resultSets = resultModel.consumeResultContent(); + if (resultSets != null && !resultSets.isEmpty()) { + for (ResultSet c : resultSets) { + int idxResultset = c.getResultsetIdx(); + /** + * Notice: we assume result-sets are visited one by one in non-descending order!!! i.e. + * either idxResultset == preIdx or idxResultset - preIdx == 1 i.e. resultsets[0] -> + * resultsets[1] -> ... 
+ */ + if (idxResultset - preIdx != 0 && idxResultset - preIdx != 1) { + throw new PresenterException( + "PST0002", + ErrorLevel.ERROR, + CommonErrMsg.PresenterErr, + "Linkis resultsets are visited in descending order or are not visited one-by-one"); + } + + boolean flag = idxResultset > preIdx; + if (idxResultset - preIdx == 1) { + resultSb.setLength(0); + resultSb + .append(MessageFormat.format(CliConstants.RESULTSET_LOGO, idxResultset + 1)) + .append(System.lineSeparator()); + if (c.getResultMeta() != null) { + resultSb.append(CliConstants.RESULTSET_META_BEGIN_LOGO).append(System.lineSeparator()); + resultSb.append(formatResultMeta(c.getResultMeta())); + resultSb.append(CliConstants.RESULTSET_META_END_LOGO).append(System.lineSeparator()); + } + } + preIdx = idxResultset; + String contentStr = formatResultContent(c.getResultMeta(), c.getContent()); + if (contentStr != null) { + resultSb.append(contentStr); + } + if (resultSb.length() != 0) { + if (writeToFile) { + String resultFileName = + resultModel.getUser() + + "-task-" + + resultModel.getJobID() + + "-result-" + + String.valueOf(idxResultset + 1) + + ".txt"; + ResultFileWriter.writeToFile(filePath, resultFileName, resultSb.toString(), flag); + } else { + LoggerManager.getPlaintTextLogger().info(resultSb.toString()); + } + resultSb.setLength(0); + } + } + } + return preIdx; + } + + protected String formatResultMeta(List> metaData) { + + StringBuilder outputBuilder = new StringBuilder(); + + if (metaData == null || metaData.size() == 0) { + return null; + } + + List titles = new ArrayList<>(); + + // gather keys as title + for (LinkedHashMap mapElement : metaData) { + if (mapElement == null || mapElement.size() == 0) { + continue; + } + + Set> entrySet = mapElement.entrySet(); + if (entrySet == null) { + break; + } + for (Map.Entry entry : entrySet) { + String key = entry.getKey(); + if (key != null && !titles.contains(key)) { + titles.add(key); + outputBuilder.append(key).append("\t"); + } + } + } + + 
outputBuilder.append(System.lineSeparator()); + + // gather value and print to output + for (LinkedHashMap mapElement : metaData) { + if (mapElement == null || mapElement.size() == 0) { + continue; + } + String candidate; + for (String title : titles) { + if (mapElement.containsKey(title)) { + candidate = mapElement.get(title); + } else { + candidate = "NULL"; + } + outputBuilder.append(candidate).append("\t"); + } + outputBuilder.append(System.lineSeparator()); + } + return outputBuilder.toString(); + } + + protected String formatResultContent( + List> metaData, List> contentData) { + + StringBuilder outputBuilder = new StringBuilder(); + if (contentData == null || contentData.size() == 0) { // finished + return null; + } + + int listLen = contentData.size(); + for (int i = 0; i < listLen; i++) { + List listElement = contentData.get(i); + if (listElement == null || listElement.size() == 0) { + continue; + } + for (String element : listElement) { + outputBuilder.append(element).append("\t"); + } + if (i < listLen - 1) { + outputBuilder.append(System.lineSeparator()); + } + } + + return outputBuilder.toString(); + } + + protected String formatResultIndicator(LinkisResultModel model) { + StringBuilder infoBuilder = new StringBuilder(); + String extraMsgStr = ""; + + if (model.getExtraMessage() != null) { + extraMsgStr = model.getExtraMessage(); + } + if (model.getJobStatus().isJobSuccess()) { + + LoggerManager.getInformationLogger() + .info("Job execute successfully! 
Will try get execute result"); + infoBuilder + .append("============Result:================") + .append(System.lineSeparator()) + .append("TaskId:") + .append(model.getJobID()) + .append(System.lineSeparator()) + .append("ExecId: ") + .append(model.getExecID()) + .append(System.lineSeparator()) + .append("User:") + .append(model.getUser()) + .append(System.lineSeparator()) + .append("Current job status:") + .append(model.getJobStatus()) + .append(System.lineSeparator()) + .append("extraMsg: ") + .append(extraMsgStr) + .append(System.lineSeparator()) + .append("result: ") + .append(extraMsgStr) + .append(System.lineSeparator()); + } else if (model.getJobStatus().isJobFinishedState()) { + LoggerManager.getInformationLogger().info("Job failed! Will not try get execute result."); + infoBuilder + .append("============Result:================") + .append(System.lineSeparator()) + .append("TaskId:") + .append(model.getJobID()) + .append(System.lineSeparator()) + .append("ExecId: ") + .append(model.getExecID()) + .append(System.lineSeparator()) + .append("User:") + .append(model.getUser()) + .append(System.lineSeparator()) + .append("Current job status:") + .append(model.getJobStatus()) + .append(System.lineSeparator()) + .append("extraMsg: ") + .append(extraMsgStr) + .append(System.lineSeparator()); + if (model.getErrCode() != null) { + infoBuilder.append("errCode: ").append(model.getErrCode()).append(System.lineSeparator()); + } + if (StringUtils.isNotBlank(model.getErrDesc())) { + infoBuilder.append("errDesc: ").append(model.getErrDesc()).append(System.lineSeparator()); + } + } else { + throw new PresenterException( + "PST0011", + ErrorLevel.ERROR, + CommonErrMsg.PresenterErr, + "Job is not completed but triggered ResultPresenter"); + } + return infoBuilder.toString(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java new file mode 100644 index 00000000000..0a948991c9c --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.present.file; + +import org.apache.linkis.cli.application.exception.PresenterException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileOutputStream; +import java.io.OutputStreamWriter; + +public class ResultFileWriter { + + public static void writeToFile( + String pathName, String fileName, String content, Boolean overWrite) { + + File dir = new File(pathName); + + if (!dir.exists()) { + try { + dir.mkdirs(); + } catch (Exception e) { + throw new PresenterException( + "PST0005", + ErrorLevel.ERROR, + CommonErrMsg.PresentDriverErr, + "Cannot mkdir for path: " + dir.getAbsolutePath(), + e); + } + } + + File file = new File(dir.getAbsolutePath() + File.separator + fileName); + + if (overWrite || !file.exists()) { + try { + if (!file.createNewFile()) { + throw new PresenterException( + "PST0006", + ErrorLevel.ERROR, + CommonErrMsg.PresentDriverErr, + "Cannot create file for path: " + file.getAbsolutePath()); + } + } catch (Exception e) { + throw new PresenterException( + "PST0006", + ErrorLevel.ERROR, + CommonErrMsg.PresentDriverErr, + "Cannot create file for path: " + file.getAbsolutePath(), + e); + } + } + + FileOutputStream fos = null; + OutputStreamWriter osWritter = null; + BufferedWriter bufferedWriter = null; + try { + fos = new FileOutputStream(file, !overWrite); + osWritter = new OutputStreamWriter(fos, "UTF-8"); + bufferedWriter = new BufferedWriter(osWritter, 1024); + bufferedWriter.write(content + "\n"); + } catch (Exception e) { + throw new PresenterException( + "PST0007", + ErrorLevel.ERROR, + CommonErrMsg.PresentDriverErr, + "Cannot write: " + file.getAbsolutePath(), + e); + + } finally { + if (bufferedWriter != null) { + try { + bufferedWriter.close(); + } catch (Exception e) { + // ignore + } + } + if (osWritter != null) { + try { + 
osWritter.close(); + } catch (Exception e) { + // ignore + } + } + if (fos != null) { + try { + fos.close(); + } catch (Exception e) { + // ignore + } + } + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/model/HelpInfoModel.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/HelpInfoModel.java similarity index 77% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/model/HelpInfoModel.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/HelpInfoModel.java index c2b5f5e7727..46dcec2d5e5 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/model/HelpInfoModel.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/HelpInfoModel.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.present.model; +package org.apache.linkis.cli.application.present.model; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; public class HelpInfoModel implements Model { CmdTemplate template; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java new file mode 100644 index 00000000000..b428a788dde --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.present.model; + +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; + +import java.util.Date; + +public class LinkisJobInfoModel implements Model { + + private String cid; + private String jobId; + private String message; + private String exception; + private String cause; + + private String taskID; + private String instance; + private String simpleExecId; + private String execId; + private String umUser; + private String executionCode; + private String logPath; + private JobStatus status; + private String engineType; + private String runType; + private Long costTime; + private Date createdTime; + private Date updatedTime; + private Date engineStartTime; + private Integer errCode; + private String errMsg; + private String executeApplicationName; + private String requestApplicationName; + private Float progress; + + @Override + public void buildModel(Object data) { + if (!(data instanceof LinkisOperResultAdapter)) { + throw new TransformerException( + "TFM0010", + ErrorLevel.ERROR, + CommonErrMsg.TransformerException, + "Failed to init LinkisJobInfoModel: " + + data.getClass().getCanonicalName() + + "is not instance of 
\"LinkisOperResultAdapter\""); + } + this.jobId = ((LinkisOperResultAdapter) data).getJobID(); + this.taskID = ((LinkisOperResultAdapter) data).getJobID(); + this.instance = ((LinkisOperResultAdapter) data).getInstance(); + this.simpleExecId = ((LinkisOperResultAdapter) data).getSimpleExecId(); + this.execId = ((LinkisOperResultAdapter) data).getStrongerExecId(); + this.umUser = ((LinkisOperResultAdapter) data).getUmUser(); + this.executionCode = ((LinkisOperResultAdapter) data).getExecutionCode(); + this.logPath = ((LinkisOperResultAdapter) data).getLogPath(); + this.status = ((LinkisOperResultAdapter) data).getJobStatus(); + this.engineType = ((LinkisOperResultAdapter) data).getEngineType(); + this.runType = ((LinkisOperResultAdapter) data).getRunType(); + this.costTime = ((LinkisOperResultAdapter) data).getCostTime(); + this.createdTime = ((LinkisOperResultAdapter) data).getCreatedTime(); + this.updatedTime = ((LinkisOperResultAdapter) data).getUpdatedTime(); + this.engineStartTime = ((LinkisOperResultAdapter) data).getEngineStartTime(); + this.errCode = ((LinkisOperResultAdapter) data).getErrCode(); + this.errMsg = ((LinkisOperResultAdapter) data).getErrDesc(); + this.executeApplicationName = ((LinkisOperResultAdapter) data).getExecuteApplicationName(); + this.requestApplicationName = ((LinkisOperResultAdapter) data).getRequestApplicationName(); + this.progress = ((LinkisOperResultAdapter) data).getJobProgress(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java new file mode 100644 index 00000000000..e5805478872 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.present.model; + +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.LogData; + +public class LinkisLogModel implements Model { + private LogData data; + + @Override + public void buildModel(Object data) { + if (!(data instanceof LogData)) { + throw new TransformerException( + "TFM0010", + ErrorLevel.ERROR, + CommonErrMsg.TransformerException, + "Failed to init LinkisLogModel: " + + data.getClass().getCanonicalName() + + "is not instance of \"LinkisLogData\""); + } + this.data = (LogData) data; + } + + public String consumeLog() { + return data.consumeLog(); + } + + public boolean logFinReceived() { + return data.isLogFin(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java new file mode 100644 index 00000000000..fd9e8eb9a2c --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.present.model; + +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.ResultData; +import org.apache.linkis.cli.application.interactor.job.common.ResultSet; + +import java.util.List; + +public class LinkisResultModel implements Model { + private ResultData data; + + @Override + public void buildModel(Object data) { + if (!(data instanceof ResultData)) { + throw new TransformerException( + "TFM0010", + ErrorLevel.ERROR, + CommonErrMsg.TransformerException, + "Failed to init LinkisResultModel: " + + data.getClass().getCanonicalName() + + "is not instance of \"LinkisResultData\""); + } + this.data = (ResultData) data; + } + + public List consumeResultContent() { + return data.consumeResultContent(); + } + + public boolean resultFinReceived() { + return data.isResultFin(); + } + + public JobStatus getJobStatus() { + return data.getJobStatus(); + } + + public String getJobID() { + return data.getJobID(); + } + + public String getUser() { + return data.getUser(); + } + + public boolean hasResult() { + return data.hasResult(); + } + + public String getExecID() { + return data.getExecID(); + } + + public Integer getErrCode() { + return data.getErrCode(); + } + + public String getErrDesc() { + return data.getErrDesc(); + } + + public String getExtraMessage() { + return data.getExtraMessage(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/CliUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/CliUtils.java new file mode 100644 
index 00000000000..a4c3e621519 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/CliUtils.java @@ -0,0 +1,268 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.utils; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.constants.LinkisConstants; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.exception.BuilderException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; + +import org.apache.commons.lang3.StringUtils; + +import java.io.*; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; + +public class CliUtils { + public static final Gson GSON = + new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create(); + + public static String getSubmitUser(VarAccess stdVarAccess, String osUser, Set adminSet) { + + String enableSpecifyUserStr = + stdVarAccess.getVar( + String.class, 
CliKeys.LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_USER_SPECIFICATION); + Boolean enableSpecifyUser = + Boolean.parseBoolean(enableSpecifyUserStr) || adminSet.contains(osUser); + String authenticationStrategy = + stdVarAccess.getVarOrDefault( + String.class, + CliKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, + LinkisConstants.AUTH_STRATEGY_STATIC); + + String submitUsr; + if (StringUtils.equalsIgnoreCase(authenticationStrategy, LinkisConstants.AUTH_STRATEGY_TOKEN)) { + /* + default -> use current os user + enableSpecifyUser -> -submitUser + enableSpecifyProxyUser -> -proxyUser + ADMIN_USERS can do anything + */ + if (enableSpecifyUser) { + if (stdVarAccess.hasVar(CliKeys.JOB_COMMON_SUBMIT_USER)) { + submitUsr = stdVarAccess.getVar(String.class, CliKeys.JOB_COMMON_SUBMIT_USER); + if (!adminSet.contains(osUser) && adminSet.contains(submitUsr)) { + throw new BuilderException( + "BLD0010", + ErrorLevel.ERROR, + CommonErrMsg.BuilderBuildErr, + "Cannot specify admin-user as submit-user"); + } + } else { + submitUsr = osUser; + LoggerManager.getInformationLogger() + .info( + "user does not specify submit-user, will use current Linux user \"" + + osUser + + "\" by default."); + } + } else if (stdVarAccess.hasVar(CliKeys.JOB_COMMON_SUBMIT_USER)) { + submitUsr = stdVarAccess.getVar(String.class, CliKeys.JOB_COMMON_SUBMIT_USER); + if (!StringUtils.equals(submitUsr, osUser)) { + throw new BuilderException( + "BLD0010", + ErrorLevel.ERROR, + CommonErrMsg.BuilderBuildErr, + "Cannot specify submit-user when user-specification switch is off"); + } + } else { + submitUsr = osUser; + LoggerManager.getInformationLogger() + .info( + "user does not specify submit-user, will use current Linux user \"" + + osUser + + "\" by default."); + } + } else if (StringUtils.equalsIgnoreCase( + authenticationStrategy, LinkisConstants.AUTH_STRATEGY_STATIC)) { + String authKey = stdVarAccess.getVar(String.class, CliKeys.LINKIS_COMMON_TOKEN_KEY); + String submitUsrInput = + 
stdVarAccess.getVarOrDefault(String.class, CliKeys.JOB_COMMON_SUBMIT_USER, authKey); + if (StringUtils.equalsIgnoreCase(submitUsrInput, authKey)) { + submitUsr = authKey; + } else { + throw new BuilderException( + "BLD0011", + ErrorLevel.ERROR, + CommonErrMsg.BuilderBuildErr, + "Submit-User should be the same as Auth-Key under Static-Authentication-Strategy \'"); + } + } else { + throw new BuilderException( + "BLD0011", + ErrorLevel.ERROR, + CommonErrMsg.BuilderBuildErr, + "Authentication strategy \'" + authenticationStrategy + "\' is not supported"); + } + + return submitUsr; + } + + public static String getProxyUser( + VarAccess stdVarAccess, String submitUsr, Set adminSet) { + + String enableSpecifyPRoxyUserStr = + stdVarAccess.getVar(String.class, CliKeys.LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_PROXY_USER); + Boolean enableSpecifyProxyUser = + Boolean.parseBoolean(enableSpecifyPRoxyUserStr) || adminSet.contains(submitUsr); + + /* + default -> use current -submitUser user + enableSpecifyUser -> -submitUser + enableSpecifyProxyUser -> -proxyUser + ADMIN_USERS can do anything + */ + String proxyUsr; + + if (enableSpecifyProxyUser) { + if (stdVarAccess.hasVar(CliKeys.JOB_COMMON_PROXY_USER)) { + proxyUsr = stdVarAccess.getVar(String.class, CliKeys.JOB_COMMON_PROXY_USER); + if (!adminSet.contains(submitUsr) && adminSet.contains(proxyUsr)) { + throw new BuilderException( + "BLD0010", + ErrorLevel.ERROR, + CommonErrMsg.BuilderBuildErr, + "Cannot specify admin-user as proxy-user"); + } + } else { + proxyUsr = submitUsr; + LoggerManager.getInformationLogger() + .info( + "user does not specify proxy-user, will use current submit-user \"" + + submitUsr + + "\" by default."); + } + } else if (stdVarAccess.hasVar(CliKeys.JOB_COMMON_PROXY_USER)) { + proxyUsr = stdVarAccess.getVar(String.class, CliKeys.JOB_COMMON_PROXY_USER); + if (!StringUtils.equals(proxyUsr, submitUsr)) { + throw new BuilderException( + "BLD0010", + ErrorLevel.ERROR, + CommonErrMsg.BuilderBuildErr, + "Cannot 
specify proxy-user when proxy-user-specification switch is off"); + } + } else { + proxyUsr = submitUsr; + LoggerManager.getInformationLogger() + .info( + "user does not specify proxy-user, will use current submit-user \"" + + proxyUsr + + "\" by default."); + } + return proxyUsr; + } + + public static String readFile(String path) { + File inputFile = new File(path); + try (InputStream inputStream = new FileInputStream(inputFile); + InputStreamReader iReader = new InputStreamReader(inputStream); + BufferedReader bufReader = new BufferedReader(iReader)) { + StringBuilder sb = new StringBuilder(); + StringBuilder line; + while (bufReader.ready()) { + line = new StringBuilder(bufReader.readLine()); + sb.append(line).append(System.lineSeparator()); + } + + return sb.toString(); + + } catch (FileNotFoundException fe) { + throw new BuilderException( + "BLD0005", + ErrorLevel.ERROR, + CommonErrMsg.BuilderBuildErr, + "User specified script file does not exist: " + path, + fe); + } catch (Exception e) { + throw new BuilderException( + "BLD0006", + ErrorLevel.ERROR, + CommonErrMsg.BuilderBuildErr, + "Cannot read user specified script file: " + path, + e); + } + } + + public static T castStringToAny(Class clazz, String val) { + if (StringUtils.isBlank(val)) { + return null; + } + T ret = null; + if (clazz == Object.class) { + ret = clazz.cast(val); + } else if (clazz == String.class) { + ret = clazz.cast(val); + } else if (clazz == Integer.class) { + ret = clazz.cast(Integer.parseInt(val)); + } else if (clazz == Double.class) { + ret = clazz.cast(Double.parseDouble(val)); + } else if (clazz == Float.class) { + ret = clazz.cast(Float.parseFloat(val)); + } else if (clazz == Long.class) { + ret = clazz.cast(Long.parseLong(val)); + } else if (clazz == Boolean.class) { + ret = clazz.cast(Boolean.parseBoolean(val)); + } + return ret; + } + + public static Map parseKVStringToMap(String kvStr, String separator) { + if (StringUtils.isBlank(separator)) { + separator = ","; + } + if 
(StringUtils.isBlank(kvStr)) { + return null; + } + Map argsProps = new HashMap<>(); + String[] args = StringUtils.splitByWholeSeparator(kvStr, separator); + for (String arg : args) { + int index = arg.indexOf("="); + if (index != -1) { + argsProps.put(arg.substring(0, index).trim(), arg.substring(index + 1).trim()); + } + } + + return argsProps; + } + + public static boolean isValidExecId(String execId) { + boolean ret = false; + if (StringUtils.isNotBlank(execId)) { + ret = true; + } + return ret; + } + + public static String progressInPercentage(float progress) { + return String.valueOf(progress * 100) + "%"; + } + + public static void doSleepQuietly(Long sleepMills) { + try { + Thread.sleep(sleepMills); + } catch (Exception ignore) { + // ignored + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/LoggerManager.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/LoggerManager.java new file mode 100644 index 00000000000..bf302e9e856 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/LoggerManager.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LoggerManager { + private static final Logger PlainTextLogger = LoggerFactory.getLogger("PlaintTextLogger"); + private static final Logger InformationLogger = LoggerFactory.getLogger("InformationLogger"); + + public static Logger getPlaintTextLogger() { + return PlainTextLogger; + } + + public static Logger getInformationLogger() { + return InformationLogger; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/SchedulerManager.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/SchedulerManager.java new file mode 100644 index 00000000000..19e7a1257d3 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/SchedulerManager.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.utils; + +import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicInteger; + +public class SchedulerManager { + private static ExecutorService fixedThreadPool; + private static ThreadPoolExecutor cachedThreadPool; + private static int THREAD_NUM = 5; + private static String THREAD_NAME = "LinkisCli-Scheduler"; + private static Boolean IS_DEAMON = false; + + public static ThreadFactory threadFactory(String threadName, Boolean isDaemon) { + return new ThreadFactory() { + AtomicInteger num = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + Thread t = new Thread(r); + t.setDaemon(isDaemon); + t.setName(threadName + num.incrementAndGet()); + return t; + } + }; + } + + public static ThreadPoolExecutor newCachedThreadPool( + int threadNum, String threadName, Boolean isDaemon) { + ThreadPoolExecutor threadPool = + new ThreadPoolExecutor( + threadNum, + threadNum, + 120L, + TimeUnit.SECONDS, + new LinkedBlockingQueue(10 * threadNum), + threadFactory(threadName, isDaemon)); + threadPool.allowCoreThreadTimeOut(true); + return threadPool; + } + + public static ExecutorService newFixedThreadPool( + int threadNum, String threadName, Boolean isDaemon) { + return Executors.newFixedThreadPool(threadNum, threadFactory(threadName, isDaemon)); + } + + public static ThreadPoolExecutor getCachedThreadPoolExecutor() { + if (cachedThreadPool == null) { // NOSONAR + synchronized (SchedulerManager.class) { // NOSONAR + if (cachedThreadPool == null) { // NOSONAR + cachedThreadPool = newCachedThreadPool(THREAD_NUM, THREAD_NAME, IS_DEAMON); + } + } + } + return cachedThreadPool; + } + + public static ExecutorService getFixedThreadPool() { + if (fixedThreadPool == null) { // NOSONAR + synchronized (SchedulerManager.class) { // NOSONAR + if (fixedThreadPool == null) { // NOSONAR + fixedThreadPool = newFixedThreadPool(THREAD_NUM, THREAD_NAME, IS_DEAMON); + } + } + } + return fixedThreadPool; + } + + 
public static void shutDown() { + if (fixedThreadPool != null) { + fixedThreadPool.shutdownNow(); + } + if (cachedThreadPool != null) { + cachedThreadPool.shutdownNow(); + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/resources/version.properties b/linkis-computation-governance/linkis-client/linkis-cli/src/main/resources/version.properties new file mode 100644 index 00000000000..0da37e5c037 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/resources/version.properties @@ -0,0 +1,16 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +cli.version=${project.version} \ No newline at end of file diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java index 0af22266615..d7ca2c0349c 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java @@ -28,26 +28,36 @@ public class LinkisClientApplicationTest { String[] cmdStr; String[] cmdStr2; + String[] cmdStr3; @BeforeEach public void before() { System.setProperty("conf.root", "src/test/resources/conf/"); System.setProperty("user.name", "hadoop"); + cmdStr3 = new String[] {"--version"}; cmdStr2 = new String[] { - // "--gatewayUrl", "http://127.0.0.1:8090", - // "--authStg", "token", - // "--authKey", "Validation-Code", - // "--authVal", "BML-AUTH", - // "job", - // "kill", - // "-j", "1121", - // "-submitUser", "user", - // "-proxyUser", "user", - - // "-varMap", "name=\"tables\"", - // "-varMap", "name=\"databases\"" - + "--gatewayUrl", + "http://127.0.0.1:9001", + "--authStg", + "token", + "--authKey", + "Validation-Code", + "--authVal", + "BML-AUTH", + "--status", + // "--log", + // "--kill", + // "--result", + "5773107", + "-submitUser", + "hadoop", + "-proxyUser", + "hadoop", + "-varMap", + "name=\"tables\"", + 
"-varMap", + "name2=\"databases\"" }; cmdStr = new String[] { @@ -99,6 +109,7 @@ public void before() { "-codeType", "shell", "-code", + // "exit -1", "whoami", // "-engineType", "spark-2.4.3", @@ -165,7 +176,8 @@ public void testProcessInput() { /** Method: exec(ProcessedData data) */ @Test public void testExec() { - // LinkisClientApplication.main(cmdStr); + // LinkisClientApplication.main(cmdStr); + LinkisClientApplication.main(cmdStr3); // LinkisClientApplication.main(cmdStr2); /* try { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliConstantsTest.java new file mode 100644 index 00000000000..b576c89f4f3 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliConstantsTest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.constants; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class CliConstantsTest { + + @Test + @DisplayName("constTest") + public void constTest() { + + String defaultConfigName = CliConstants.DEFAULT_CONFIG_NAME; + Long jobQuerySleepMills = CliConstants.JOB_QUERY_SLEEP_MILLS; + String resultsetLogo = CliConstants.RESULTSET_LOGO; + String resultsetMetaBeginLogo = CliConstants.RESULTSET_META_BEGIN_LOGO; + String resultsetMetaEndLogo = CliConstants.RESULTSET_META_END_LOGO; + String resultsetSeparatorLogo = CliConstants.RESULTSET_SEPARATOR_LOGO; + Integer resultsetPageSize = CliConstants.RESULTSET_PAGE_SIZE; + String jobCreatorDefault = CliConstants.JOB_CREATOR_DEFAULT; + String jobCreatorAsyncDefault = CliConstants.JOB_CREATOR_ASYNC_DEFAULT; + String dummyCid = CliConstants.DUMMY_CID; + String linkisCli = CliConstants.LINKIS_CLI; + String ujesMode = CliConstants.UJES_MODE; + String onceMode = CliConstants.ONCE_MODE; + + Assertions.assertEquals("linkis-cli.properties", defaultConfigName); + Assertions.assertTrue(2000L == jobQuerySleepMills.longValue()); + Assertions.assertEquals("============ RESULT SET {0} ============", resultsetLogo); + Assertions.assertEquals("----------- META DATA ------------", resultsetMetaBeginLogo); + Assertions.assertEquals("------------ END OF META DATA ------------", resultsetMetaEndLogo); + Assertions.assertEquals("------------------------", resultsetSeparatorLogo); + Assertions.assertTrue(5000 == resultsetPageSize.intValue()); + Assertions.assertEquals("LINKISCLI", jobCreatorDefault); + Assertions.assertEquals("LINKISCLIASYNC", jobCreatorAsyncDefault); + Assertions.assertEquals("dummy", dummyCid); + Assertions.assertEquals("LinkisCli", linkisCli); + Assertions.assertEquals("ujes", ujesMode); + Assertions.assertEquals("once", onceMode); + } +} diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliKeysTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliKeysTest.java new file mode 100644 index 00000000000..3fc7b84a0aa --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliKeysTest.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.constants; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class CliKeysTest { + + @Test + @DisplayName("constTest") + public void constTest() { + + String adminUsers = CliKeys.ADMIN_USERS; + String linkisClientNoncustomizable = CliKeys.LINKIS_CLIENT_NONCUSTOMIZABLE; + String logPathKey = CliKeys.LOG_PATH_KEY; + String logFileKey = CliKeys.LOG_FILE_KEY; + String clientConfigRootKey = CliKeys.CLIENT_CONFIG_ROOT_KEY; + String defaultConfigFileNameKey = CliKeys.DEFAULT_CONFIG_FILE_NAME_KEY; + String linuxUserKey = CliKeys.LINUX_USER_KEY; + String jobExec = CliKeys.JOB_EXEC; + String jobExecCode = CliKeys.JOB_EXEC_CODE; + String jobContent = CliKeys.JOB_CONTENT; + String jobSource = CliKeys.JOB_SOURCE; + String jobParamConf = CliKeys.JOB_PARAM_CONF; + String jobParamRuntime = CliKeys.JOB_PARAM_RUNTIME; + String jobParamVar = CliKeys.JOB_PARAM_VAR; + String jobLabel = CliKeys.JOB_LABEL; + + Assertions.assertEquals("hadoop,root,shangda", adminUsers); + Assertions.assertEquals("wds.linkis.client.noncustomizable", linkisClientNoncustomizable); + Assertions.assertEquals("log.path", logPathKey); + Assertions.assertEquals("log.file", logFileKey); + Assertions.assertEquals("conf.root", clientConfigRootKey); + Assertions.assertEquals("conf.file", defaultConfigFileNameKey); + Assertions.assertEquals("user.name", linuxUserKey); + + Assertions.assertEquals("wds.linkis.client.exec", jobExec); + Assertions.assertEquals("wds.linkis.client.exec.code", jobExecCode); + Assertions.assertEquals("wds.linkis.client.jobContent", jobContent); + Assertions.assertEquals("wds.linkis.client.source", jobSource); + + Assertions.assertEquals("wds.linkis.client.param.conf", jobParamConf); + Assertions.assertEquals("wds.linkis.client.param.runtime", jobParamRuntime); + Assertions.assertEquals("wds.linkis.client.param.var", jobParamVar); + + 
Assertions.assertEquals("wds.linkis.client.label", jobLabel); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/LinkisConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/LinkisConstantsTest.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/LinkisConstantsTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/LinkisConstantsTest.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/LinkisKeysTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/LinkisKeysTest.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/LinkisKeysTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/LinkisKeysTest.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/TestConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/TestConstants.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/TestConstants.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/TestConstants.java 
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/CliCmdTypeTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/CliCmdTypeTest.java new file mode 100644 index 00000000000..fecda11db76 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/CliCmdTypeTest.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.command; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class CliCmdTypeTest { + + @Test + @DisplayName("enumTest") + public void enumTest() { + + int id = CliCmdType.UNIVERSAL.getId(); + String name = CliCmdType.UNIVERSAL.getName(); + String desc = CliCmdType.UNIVERSAL.getDesc(); + + Assertions.assertTrue(1 == id); + Assertions.assertEquals("linkis-cli", name); + Assertions.assertNotNull(desc); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java new file mode 100644 index 00000000000..0155ee50293 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.command.template; + +import org.apache.linkis.cli.application.constants.TestConstants; +import org.apache.linkis.cli.application.entity.command.CmdType; + +public enum TestCmdType implements CmdType { + TEST_PRIMARY(TestConstants.PRIMARY_COMMAND, 1, TestConstants.SPARK_DESC), + SPARK(TestConstants.SPARK, 1, TestConstants.SPARK_DESC); + // TEST(TestConstants.TEST_COMMAND, 0, TestConstants.TEST_DESC); + + private int id; + private String name; + private String desc; + + TestCmdType(String name, int id) { + this.id = id; + this.name = name; + this.desc = null; + } + + TestCmdType(String name, int id, String desc) { + this.id = id; + this.name = name; + this.desc = desc; + } + + @Override + public int getId() { + return this.id; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public String getDesc() { + return this.desc; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java new file mode 100644 index 00000000000..a077cfcca7f --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.command.template; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.constants.TestConstants; +import org.apache.linkis.cli.application.interactor.command.parser.transformer.ParamKeyMapper; + +public class TestParamMapper extends ParamKeyMapper { + @Override + public void initMapperRules() { + super.updateMapping(TestConstants.PARAM_COMMON_CMD, CliKeys.JOB_EXEC_CODE); + super.updateMapping(TestConstants.PARAM_PROXY, CliKeys.LINKIS_COMMON_GATEWAY_URL); + super.updateMapping(TestConstants.PARAM_USER, CliKeys.LINKIS_COMMON_TOKEN_KEY); + super.updateMapping(TestConstants.PARAM_USR_CONF, CliKeys.LINKIS_CLIENT_USER_CONFIG); + super.updateMapping(TestConstants.PARAM_PASSWORD, CliKeys.LINKIS_COMMON_TOKEN_VALUE); + super.updateMapping(TestConstants.PARAM_PROXY_USER, CliKeys.JOB_COMMON_PROXY_USER); + + updateMapping( + TestConstants.PARAM_SPARK_EXECUTOR_CORES, TestConstants.LINKIS_SPARK_EXECUTOR_CORES); + updateMapping( + TestConstants.PARAM_SPARK_EXECUTOR_MEMORY, TestConstants.LINKIS_SPARK_EXECUTOR_MEMORY); + updateMapping( + TestConstants.PARAM_SPARK_NUM_EXECUTORS, TestConstants.LINKIS_SPARK_NUM_EXECUTORS); + updateMapping("spark.executor.instances", TestConstants.LINKIS_SPARK_NUM_EXECUTORS); + // updateMapping(SparkCommandConstants.PARAM_SPARK_NAME, + // SparkCommandConstants.LINKIS_SPARK_NAME); + updateMapping( + TestConstants.PARAM_SPARK_SHUFFLE_PARTITIONS, + TestConstants.LINKIS_SPARK_SHUFFLE_PARTITIONS); + 
updateMapping(TestConstants.PARAM_SPARK_RUNTYPE, CliKeys.JOB_LABEL_CODE_TYPE); + updateMapping(TestConstants.PARAM_YARN_QUEUE, TestConstants.YARN_QUEUE); + } + + // super.updateMapping("key1", "spark.cmd"); //should throw exception + // super.updateMapping("TestConstants.PARAM_SPARK_CMD", "spark.cmd"); + // super.updateMapping("TestConstants.PARAM_SPARK_CMD", "spark.cmd"); + +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java new file mode 100644 index 00000000000..fb53803b6ae --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java @@ -0,0 +1,231 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.command.template; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.constants.TestConstants; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.interactor.command.template.option.StdOption; + +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** @description: CommandTemplate for Spark Jobs */ +public class TestSparkCmdTemplate extends AbstractCmdTemplate { + protected StdOption databaseOp = + option( + null, + TestConstants.PARAM_DB, + new String[] {"-d", "--database"}, + "specify database", + true, + ""); + protected StdOption proxyOp = + option( + null, + TestConstants.PARAM_PROXY, + new String[] {"-x", "--proxy"}, + "specify proxy url", + true, + ""); + protected StdOption userOp = + option( + null, + CliKeys.JOB_COMMON_SUBMIT_USER, + new String[] {"-u", "--user"}, + "specify user", + true, + ""); + protected StdOption confOp = + option( + null, + TestConstants.PARAM_USR_CONF, + new String[] {"-c", "--conf"}, + "specify configuration from property file", + true, + ""); + private Logger logger = LoggerFactory.getLogger(TestSparkCmdTemplate.class); + private StdOption passwordOp = + option( + null, + CliKeys.JOB_COMMON_SUBMIT_PASSWORD, + new String[] {"-pwd", "--passwd"}, + "specify user password", + true, + ""); + private StdOption syncOp = + option( + null, + TestConstants.PARAM_SYNC_KEY, + new String[] {"-sk", "--synckey"}, + "specify sync key", + true, + ""); + private StdOption proxyUserOp = + option( + null, + TestConstants.PARAM_PROXY_USER, + new String[] {"-pu", "--proxy-user"}, + "specify proxy user", + true, + ""); + + private StdOption helpOp = + option(null, TestConstants.PARAM_HELP, new String[] {"-h", "--help"}, "help info", true, ""); + + private StdOption> confMap = + option( + null, + CliKeys.JOB_PARAM_CONF, + 
new String[] {"-confMap"}, + "confMap", + true, + new HashMap<>()); + + private StdOption filePara = + option( + null, + TestConstants.PARAM_COMMON_FILE, + new String[] {"--file", "-f"}, + "Spark SQL File to Execute!", + true, + ""); + + private StdOption commandPara = + option( + null, + TestConstants.PARAM_COMMON_CMD, + new String[] {"--cmd"}, + "Spark SQL Command to Execute!", + true, + ""); + + private StdOption argsPara = + option( + null, + TestConstants.PARAM_COMMON_ARGS, + new String[] {"--args", "-a"}, + "Set command args, k-v pairs delimited by comma, e.g. key1=value1,key2=value2,...", + true, + ""); + + private StdOption splitPara = + option( + null, + TestConstants.PARAM_COMMON_SPLIT, + new String[] {"--split", "-s"}, + "specify the split character string", + true, + ","); + + private StdOption queuePara = + option( + null, + TestConstants.PARAM_YARN_QUEUE, + new String[] {"--queue", "-q"}, + "specify the queue", + true, + "default"); + + private StdOption namePara = + option( + null, + TestConstants.PARAM_SPARK_NAME, + new String[] {"--name", "-n"}, + "specify the application name. WARNING:this option is deprecated. Linkis does not support this variable", + true, + ""); + + private StdOption> hiveconfPara = + option( + null, + TestConstants.PARAM_SPARK_HIVECONF, + new String[] {"--hiveconf", "-hc"}, + "specify the hiveconf setting,e.g. 
hive.cli.print.header=false", + true, + new HashMap<>()); + + private StdOption nePara = + option( + null, + TestConstants.PARAM_SPARK_NUM_EXECUTORS, + new String[] {"--num-executors", "-ne"}, + "specify the spark application container", + true, + 3); + + private StdOption ecPara = + option( + null, + TestConstants.PARAM_SPARK_EXECUTOR_CORES, + new String[] {"--executor-cores", "-ec"}, + "specify the spark application container vcores(less than queue's max vcores)", + true, + 2); + + private StdOption emPara = + option( + null, + TestConstants.PARAM_SPARK_EXECUTOR_MEMORY, + new String[] {"--executor-memory", "-em"}, + "specify the spark application executor's memory, 1.5G-2G/vcore", + true, + "4G"); + + private StdOption spPara = + option( + null, + TestConstants.PARAM_SPARK_SHUFFLE_PARTITIONS, + new String[] {"--shuffle-partitions", "-sp"}, + "specify the spark.sql.shuffle.partitions", + true, + 200); + + private StdOption> otherPara = + option( + null, + TestConstants.PARAM_COMMON_OTHER_KV, + new String[] {"--other"}, + "specify the other parameters", + true, + new HashMap<>()); + + // private CmdOption runTypePara = option(TestConstants.PARAM_SPARK_RUNTYPE, new + // String[]{"--runtype"}, + // "specify the runtype parameters: sql pyspark scala", true, "sql"); + + public TestSparkCmdTemplate() { + super(TestCmdType.SPARK); + } + + @Override + public void checkParams() throws CommandException {} + + @Override + protected Object clone() throws CloneNotSupportedException { + return super.clone(); + } + + @Override + public TestSparkCmdTemplate getCopy() { + return (TestSparkCmdTemplate) super.getCopy(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java similarity index 95% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java index 1b3b7c67ec1..ff71dd0c430 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java @@ -17,6 +17,8 @@ package org.apache.linkis.cli.application.interactor.job; +import org.apache.linkis.cli.application.interactor.job.common.LinkisJobStatus; + import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/KeyParserTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/KeyParserTest.java new file mode 100644 index 00000000000..4c6c494c20b --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/KeyParserTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.builder; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.interactor.job.common.KeyParser; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class KeyParserTest { + + @Test + @DisplayName("removePrefixForKeysInMapTest") + public void removePrefixForKeysInMapTest() { + + Map map = new HashMap<>(); + map.put(CliKeys.JOB_PARAM_CONF, new Object()); + map.put("name", new Object()); + + Map stringObjectMap = KeyParser.removePrefixForKeysInMap(map); + Assertions.assertTrue(1 == stringObjectMap.size()); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/JobCmdSubTypeTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/JobCmdSubTypeTest.java new file mode 100644 index 00000000000..c720e7f2c2a --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/JobCmdSubTypeTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.subtype; + +import org.apache.linkis.cli.application.interactor.job.jobcmd.JobCmdSubType; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class JobCmdSubTypeTest { + + @Test + @DisplayName("enumTest") + public void enumTest() { + + String killName = JobCmdSubType.KILL.getName(); + String logName = JobCmdSubType.LOG.getName(); + String descName = JobCmdSubType.DESC.getName(); + String statusName = JobCmdSubType.STATUS.getName(); + String listName = JobCmdSubType.LIST.getName(); + String resultName = JobCmdSubType.RESULT.getName(); + + Assertions.assertEquals("kill", killName); + Assertions.assertEquals("log", logName); + Assertions.assertEquals("desc", descName); + Assertions.assertEquals("status", statusName); + Assertions.assertEquals("list", listName); + Assertions.assertEquals("result", resultName); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/once/OnceJobConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/operator/once/OnceJobConstantsTest.java similarity index 100% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/once/OnceJobConstantsTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/operator/once/OnceJobConstantsTest.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/ujes/UJESConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/operator/ujes/UJESConstantsTest.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/ujes/UJESConstantsTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/operator/ujes/UJESConstantsTest.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java similarity index 95% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java index 5af0e6ab857..8e07a5a6bb5 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java @@ -28,7 +28,7 @@ public class UtilsTest { public void isValidExecIdTest() { String execId = 
"0001"; - boolean validExecId = Utils.isValidExecId(execId); + boolean validExecId = CliUtils.isValidExecId(execId); Assertions.assertTrue(validExecId); } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/conf/linkis-cli.properties b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/conf/linkis-cli.properties rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/conf/user.properties b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/user.properties similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/conf/user.properties rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/user.properties diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/linkis-cli.properties b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/linkis-cli.properties rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/log4j2.xml b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/log4j2.xml similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/log4j2.xml rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/log4j2.xml diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/testScala.scala b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/testScala.scala similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/testScala.scala rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/testScala.scala diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml b/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml index 36090c566af..eeeda2249d8 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml @@ -58,7 +58,7 @@ org.apache.linkis - linkis-bml-client + linkis-pes-client ${project.version} @@ -69,10 +69,6 @@ org.apache.linkis linkis-hadoop-common - - org.json4s - json4s-jackson_${scala.binary.version} - diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobBuilder.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobBuilder.scala index 3daba19410a..2d9bb094fc7 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobBuilder.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobBuilder.scala @@ -175,7 +175,7 @@ object LinkisJobBuilder { private var serverUrl: String = _ private var authTokenValue: String = - "LINKIS_CLI_TEST" // This is the default authToken, we usually suggest set different ones for 
users. + Configuration.LINKIS_TOKEN.getValue // This is the default authToken, we usually suggest set different ones for users. def setDefaultClientConfig(clientConfig: DWSClientConfig): Unit = this.clientConfig = clientConfig diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobClient.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobClient.scala index d44c479abb2..80e8e7ad429 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobClient.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/LinkisJobClient.scala @@ -17,11 +17,37 @@ package org.apache.linkis.computation.client -import org.apache.linkis.computation.client.interactive.InteractiveJob -import org.apache.linkis.computation.client.once.OnceJob +import org.apache.linkis.bml.client.BmlClientFactory +import org.apache.linkis.computation.client.interactive.{InteractiveJob, InteractiveJobBuilder} +import org.apache.linkis.computation.client.once.{LinkisManagerClient, OnceJob} +import org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SimpleOnceJobBuilder} +import org.apache.linkis.httpclient.dws.config.DWSClientConfig +import org.apache.linkis.ujes.client.UJESClientImpl import java.io.Closeable +class LinkisJobClient(clientConfig: DWSClientConfig) extends Closeable { + + private val ujseClient = new UJESClientImpl(clientConfig) + + private lazy val linkisManagerCLient = LinkisManagerClient(ujseClient) + + override def close(): Unit = { + if (null != linkisManagerCLient) { + linkisManagerCLient.close() + } + } + + def onceJobBuilder(): SimpleOnceJobBuilder = + SimpleOnceJob.builder(SimpleOnceJobBuilder.getBmlClient(clientConfig), linkisManagerCLient) + + 
def interactiveJobBuilder(): InteractiveJobBuilder = { + val builder = InteractiveJob.builder() + builder.setUJESClient(ujseClient) + } + +} + /** * This class is only used to provide a unified entry for user to build a LinkisJob conveniently and * simply. Please keep this class lightweight enough, do not set too many field to confuse user. diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala index 3ac3cb7c88b..bc1bb75f554 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala @@ -17,19 +17,24 @@ package org.apache.linkis.computation.client.once +import org.apache.linkis.common.utils.Utils import org.apache.linkis.computation.client.once.action.{ + AskEngineConnAction, CreateEngineConnAction, EngineConnOperateAction, GetEngineConnAction, KillEngineConnAction, - LinkisManagerAction + LinkisManagerAction, + ListEngineConnAction } import org.apache.linkis.computation.client.once.result.{ + AskEngineConnResult, CreateEngineConnResult, EngineConnOperateResult, GetEngineConnResult, KillEngineConnResult, - LinkisManagerResult + LinkisManagerResult, + ListEngineConnResult } import org.apache.linkis.httpclient.dws.DWSHttpClient import org.apache.linkis.httpclient.request.Action @@ -39,12 +44,16 @@ import java.io.Closeable trait LinkisManagerClient extends Closeable { + def askEngineConn(askEngineConnAction: AskEngineConnAction): AskEngineConnResult + def createEngineConn(createEngineConnAction: CreateEngineConnAction): CreateEngineConnResult def 
getEngineConn(getEngineConnAction: GetEngineConnAction): GetEngineConnResult def killEngineConn(killEngineConnAction: KillEngineConnAction): KillEngineConnResult + def listEngineConn(listEngineConnAction: ListEngineConnAction): ListEngineConnResult + def executeEngineConnOperation( engineConnOperateAction: EngineConnOperateAction ): EngineConnOperateResult @@ -82,7 +91,25 @@ class LinkisManagerClientImpl(ujesClient: UJESClient) extends LinkisManagerClien override def executeEngineConnOperation( engineConnOperateAction: EngineConnOperateAction - ): EngineConnOperateResult = execute(engineConnOperateAction) + ): EngineConnOperateResult = { + Utils.tryCatch { + val rs = execute[EngineConnOperateResult](engineConnOperateAction) + rs + } { case e: Exception => + val rs = new EngineConnOperateResult + rs.setIsError(true) + rs.setErrorMsg(e.getMessage) + rs + } + } override def close(): Unit = ujesClient.close() + + override def askEngineConn(askEngineConnAction: AskEngineConnAction): AskEngineConnResult = + execute(askEngineConnAction) + + override def listEngineConn(listEngineConnAction: ListEngineConnAction): ListEngineConnResult = { + execute(listEngineConnAction) + } + } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/AskEngineConnAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/AskEngineConnAction.scala new file mode 100644 index 00000000000..4b89b53764e --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/AskEngineConnAction.scala @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.computation.client.once.action + +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.request.POSTAction +import org.apache.linkis.ujes.client.exception.UJESJobException + +import org.apache.commons.lang3.StringUtils + +import java.util + +class AskEngineConnAction extends POSTAction with LinkisManagerAction { + + override def getRequestPayload: String = + DWSHttpClient.jacksonJson.writeValueAsString(getRequestPayloads) + + override def suffixURLs: Array[String] = Array("linkisManager", "askEngineConn") + +} + +object AskEngineConnAction { + + def newBuilder(): Builder = new Builder + + class Builder private[AskEngineConnAction] () { + private var user: String = _ + private var properties: util.Map[String, String] = _ + private var labels: util.Map[String, String] = _ + private var maxSubmitTime: Long = _ + private var createService: String = _ + private var description: String = _ + + def setUser(user: String): Builder = { + this.user = user + this + } + + def setProperties(properties: util.Map[String, String]): Builder = { + this.properties = properties + this + } + + def setLabels(labels: java.util.Map[String, String]): Builder = { + this.labels = labels + this + } + + def setMaxSubmitTime(maxSubmitTime: Long): Builder = { + this.maxSubmitTime = maxSubmitTime + this + } + + def setCreateService(createService: 
String): Builder = { + this.createService = createService + this + } + + def setDescription(description: String): Builder = { + this.description = description + this + } + + def build(): AskEngineConnAction = { + val action = new AskEngineConnAction() + if (user == null) throw new UJESJobException("user is needed!") + if (properties == null) properties = new java.util.HashMap[String, String] + if (labels == null) throw new UJESJobException("labels is needed!") + action.setUser(user) + action.addRequestPayload("properties", properties) + action.addRequestPayload("labels", labels) + if (StringUtils.isNotBlank(createService)) { + action.addRequestPayload("createService", createService) + } + if (null != maxSubmitTime) { + action.addRequestPayload("timeOut", maxSubmitTime) + } + if (StringUtils.isNotBlank(description)) { + action.addRequestPayload("description", description) + } + action + } + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/ListEngineConnAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/ListEngineConnAction.scala new file mode 100644 index 00000000000..c76a5e78e3b --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/ListEngineConnAction.scala @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.computation.client.once.action + +import org.apache.linkis.httpclient.request.GetAction +import org.apache.linkis.ujes.client.exception.UJESClientBuilderException + +class ListEngineConnAction extends GetAction with LinkisManagerAction { + override def suffixURLs: Array[String] = Array("linkisManager", "listUserEngines") +} + +object ListEngineConnAction { + def newBuilder(): Builder = new Builder + + class Builder private[ListEngineConnAction] () { + + private var user: String = _ + + def setUser(user: String): Builder = { + this.user = user + this + } + + def build(): ListEngineConnAction = { + if (user == null) throw new UJESClientBuilderException("user is needed!") + val listEngineConnAction = new ListEngineConnAction + listEngineConnAction.setUser(user) + listEngineConnAction + } + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/AskEngineConnResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/AskEngineConnResult.scala new file mode 100644 index 00000000000..58c6085b457 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/AskEngineConnResult.scala @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.computation.client.once.result + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult + +@DWSHttpMessageResult("/api/rest_j/v\\d+/linkisManager/askEngineConn") +class AskEngineConnResult extends GetEngineConnResult diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/EngineConnOperateResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/EngineConnOperateResult.scala index 1bf12e0418b..50df73bd106 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/EngineConnOperateResult.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/EngineConnOperateResult.scala @@ -33,9 +33,11 @@ class EngineConnOperateResult extends LinkisManagerResult { this.result = result } + def getErrorMsg(): String = errorMsg + def setErrorMsg(errorMsg: String): Unit = this.errorMsg = errorMsg - def setError(isError: Boolean): Unit = this.isError = isError + def getIsError(): Boolean = isError 
def setIsError(isError: Boolean): Unit = this.isError = isError diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/GetEngineConnResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/GetEngineConnResult.scala index e964cd714cf..b20923de899 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/GetEngineConnResult.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/GetEngineConnResult.scala @@ -17,6 +17,7 @@ package org.apache.linkis.computation.client.once.result +import org.apache.linkis.common.ServiceInstance import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult import java.util @@ -32,4 +33,41 @@ class GetEngineConnResult extends LinkisManagerResult { def getNodeInfo: util.Map[String, Any] = engineConnNode + protected def getAs[T](map: util.Map[String, Any], key: String): T = + map.get(key).asInstanceOf[T] + + def getTicketId(): String = getAs(engineConnNode, "ticketId") + + def getServiceInstance(): ServiceInstance = + engineConnNode.get("serviceInstance") match { + case serviceInstance: util.Map[String, Any] => + ServiceInstance( + getAs(serviceInstance, "applicationName"), + getAs(serviceInstance, "instance") + ) + case _ => null + } + + def getNodeStatus(): String = getAs(engineConnNode, "nodeStatus") + + def getECMServiceInstance(): ServiceInstance = + engineConnNode.get("ecmServiceInstance") match { + case serviceInstance: util.Map[String, Any] => + ServiceInstance( + getAs(serviceInstance, "applicationName"), + getAs(serviceInstance, "instance") + ) + case _ => null + } + + def getManagerServiceInstance(): ServiceInstance = + 
engineConnNode.get("managerServiceInstance") match { + case serviceInstance: util.Map[String, Any] => + ServiceInstance( + getAs(serviceInstance, "applicationName"), + getAs(serviceInstance, "instance") + ) + case _ => null + } + } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/ListEngineConnResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/ListEngineConnResult.scala new file mode 100644 index 00000000000..c31ccf481fa --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/ListEngineConnResult.scala @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.computation.client.once.result + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult + +import java.util + +@DWSHttpMessageResult("/api/rest_j/v\\d+/linkisManager/listUserEngines") +class ListEngineConnResult extends LinkisManagerResult { + + private var engines: util.List[util.Map[String, AnyRef]] = _ + + def setEngines(engines: util.List[util.Map[String, AnyRef]]): Unit = { + this.engines = engines + } + + def getEngines: util.List[util.Map[String, AnyRef]] = engines + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala index baab361b584..13d96c238a0 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala @@ -17,6 +17,7 @@ package org.apache.linkis.computation.client.once.simple +import org.apache.linkis.bml.client.BmlClient import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.utils.Utils import org.apache.linkis.computation.client.LinkisJobMetrics @@ -94,6 +95,10 @@ trait SimpleOnceJob extends OnceJob { case operator => operator } + def getEcServiceInstance: ServiceInstance = serviceInstance + + def getEcTicketId: String = ticketId + } class SubmittableSimpleOnceJob( @@ -153,6 +158,11 @@ object SimpleOnceJob { def builder(): SimpleOnceJobBuilder = new SimpleOnceJobBuilder + def builder( + bmlClient: BmlClient, + linkisManagerClient: LinkisManagerClient + ): SimpleOnceJobBuilder = new SimpleOnceJobBuilder(bmlClient, linkisManagerClient) + /** * 
Build a submitted SimpleOnceJob by id and user. * @param id diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJobBuilder.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJobBuilder.scala index dc4451ff0ff..510aabf7f40 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJobBuilder.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJobBuilder.scala @@ -18,8 +18,10 @@ package org.apache.linkis.computation.client.once.simple import org.apache.linkis.bml.client.{BmlClient, BmlClientFactory} +import org.apache.linkis.common.conf.Configuration.LINKIS_TOKEN import org.apache.linkis.common.utils.Utils import org.apache.linkis.computation.client.LinkisJobBuilder +import org.apache.linkis.computation.client.LinkisJobBuilder.clientConfig import org.apache.linkis.computation.client.once.LinkisManagerClient import org.apache.linkis.computation.client.once.action.CreateEngineConnAction import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder._ @@ -28,6 +30,8 @@ import org.apache.linkis.governance.common.entity.job.OnceExecutorContent import org.apache.linkis.governance.common.utils.OnceExecutorContentUtils import org.apache.linkis.governance.common.utils.OnceExecutorContentUtils.BmlResource import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy +import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.protocol.utils.TaskUtils import 
org.apache.linkis.ujes.client.exception.UJESJobException @@ -38,12 +42,19 @@ import java.util import scala.collection.convert.WrapAsJava._ import scala.collection.convert.WrapAsScala._ -class SimpleOnceJobBuilder private[simple] () extends LinkisJobBuilder[SubmittableSimpleOnceJob] { +class SimpleOnceJobBuilder private[simple] ( + private val bmlClient: BmlClient, + private val linkisManagerClient: LinkisManagerClient +) extends LinkisJobBuilder[SubmittableSimpleOnceJob] { private var createService: String = _ private var maxSubmitTime: Long = _ private var description: String = _ + def this() = { + this(null, null) + } + def setCreateService(createService: String): this.type = { this.createService = createService this @@ -69,10 +80,26 @@ class SimpleOnceJobBuilder private[simple] () extends LinkisJobBuilder[Submittab val contentMap = OnceExecutorContentUtils.contentToMap(onceExecutorContent) val bytes = DWSHttpClient.jacksonJson.writeValueAsBytes(contentMap) val response = - getBmlClient.uploadResource(executeUser, getFilePath, new ByteArrayInputStream(bytes)) + getThisBMLClient.uploadResource(executeUser, getFilePath, new ByteArrayInputStream(bytes)) OnceExecutorContentUtils.resourceToValue(BmlResource(response.resourceId, response.version)) } + protected def getThisBMLClient(): BmlClient = { + if (null == this.bmlClient) { + getBmlClient(LinkisJobBuilder.getDefaultClientConfig) + } else { + this.bmlClient + } + } + + protected def getThisLinkisManagerClient(): LinkisManagerClient = { + if (null == this.linkisManagerClient) { + getLinkisManagerClient + } else { + this.linkisManagerClient + } + } + override def build(): SubmittableSimpleOnceJob = { ensureNotNull(labels, "labels") ensureNotNull(jobContent, "jobContent") @@ -99,7 +126,7 @@ class SimpleOnceJobBuilder private[simple] () extends LinkisJobBuilder[Submittab .setMaxSubmitTime(maxSubmitTime) .setDescription(description) .build() - new SubmittableSimpleOnceJob(getLinkisManagerClient, createEngineConnAction) 
+ new SubmittableSimpleOnceJob(getThisLinkisManagerClient, createEngineConnAction) } implicit def toMap(map: util.Map[String, Any]): util.Map[String, String] = map.map { @@ -128,10 +155,27 @@ object SimpleOnceJobBuilder { private var bmlClient: BmlClient = _ private var linkisManagerClient: LinkisManagerClient = _ - def getBmlClient: BmlClient = { + def getBmlClient(clientConfig: DWSClientConfig): BmlClient = { if (bmlClient == null) synchronized { if (bmlClient == null) { - bmlClient = BmlClientFactory.createBmlClient(LinkisJobBuilder.getDefaultClientConfig) + val newClientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(clientConfig.getServerUrl) + .connectionTimeout(clientConfig.getConnectTimeout) + .discoveryEnabled(clientConfig.isDiscoveryEnabled) + .loadbalancerEnabled(clientConfig.isLoadbalancerEnabled) + .maxConnectionSize(clientConfig.getMaxConnection) + .retryEnabled(clientConfig.isRetryEnabled) + .setRetryHandler(clientConfig.getRetryHandler) + .readTimeout( + clientConfig.getReadTimeout + ) // We think 90s is enough, if SocketTimeoutException is throw, just set a new clientConfig to modify it. 
+ .setAuthenticationStrategy(new TokenAuthenticationStrategy()) + .setAuthTokenKey(TokenAuthenticationStrategy.TOKEN_KEY) + .setAuthTokenValue(LINKIS_TOKEN.getValue) + .setDWSVersion(clientConfig.getDWSVersion) + .build() + bmlClient = BmlClientFactory.createBmlClient(newClientConfig) Utils.addShutdownHook(() => bmlClient.close()) } } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/operator/impl/EngineConnApplicationInfoOperator.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/operator/impl/EngineConnApplicationInfoOperator.scala index 83399bf3714..a1dba634043 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/operator/impl/EngineConnApplicationInfoOperator.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/operator/impl/EngineConnApplicationInfoOperator.scala @@ -19,6 +19,7 @@ package org.apache.linkis.computation.client.operator.impl import org.apache.linkis.computation.client.once.result.EngineConnOperateResult import org.apache.linkis.computation.client.operator.OnceJobOperator +import org.apache.linkis.governance.common.constant.ec.ECConstants import org.apache.linkis.ujes.client.exception.UJESJobException class EngineConnApplicationInfoOperator extends OnceJobOperator[ApplicationInfo] { @@ -28,7 +29,7 @@ class EngineConnApplicationInfoOperator extends OnceJobOperator[ApplicationInfo] override protected def resultToObject(result: EngineConnOperateResult): ApplicationInfo = { ApplicationInfo( result - .getAsOption("applicationId") + .getAsOption(ECConstants.YARN_APPID_NAME_KEY) .getOrElse( throw new UJESJobException( 20300, @@ -36,14 +37,14 @@ class EngineConnApplicationInfoOperator extends OnceJobOperator[ApplicationInfo] ) ), 
result - .getAsOption("applicationUrl") + .getAsOption(ECConstants.YARN_APP_URL_KEY) .getOrElse( throw new UJESJobException( 20300, s"Cannot get applicationUrl from EngineConn $getServiceInstance." ) ), - result.getAs("queue") + result.getAs(ECConstants.QUEUE) ) } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/LinkisFSClient.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/LinkisFSClient.scala new file mode 100644 index 00000000000..3e7f6755929 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/LinkisFSClient.scala @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ujes.client + +import org.apache.linkis.ujes.client.request.{ + CreateNewDirAction, + IsPathExistAction, + UploadFileAction +} +import org.apache.linkis.ujes.client.response.{ + CreateNewDirResult, + IsPathExistResult, + UploadFileResult +} + +class LinkisFSClient(client: UJESClient) { + + def isPathExist(isPathExistAction: IsPathExistAction): Boolean = { + val result = client.executeUJESJob(isPathExistAction).asInstanceOf[IsPathExistResult] + result.isExist + } + + def createNewDir(makeDirAction: CreateNewDirAction): CreateNewDirResult = { + client.executeUJESJob(makeDirAction).asInstanceOf[CreateNewDirResult] + } + + def upload(uploadFileAction: UploadFileAction): UploadFileResult = { + client.executeUJESJob(uploadFileAction).asInstanceOf[UploadFileResult] + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala index 6431c47ebff..c72a74e2e67 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala @@ -17,18 +17,21 @@ package org.apache.linkis.ujes.client +import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.httpclient.authentication.AuthenticationStrategy import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} import org.apache.linkis.httpclient.response.Result import org.apache.linkis.ujes.client.request._ import org.apache.linkis.ujes.client.request.JobExecIdAction.JobServiceType -import org.apache.linkis.ujes.client.response._ +import 
org.apache.linkis.ujes.client.response.{EncryptTokenResult, _} +import org.apache.linkis.ujes.client.utils.UJESClientUtils import java.io.Closeable +import java.util import java.util.concurrent.TimeUnit -abstract class UJESClient extends Closeable { +abstract class UJESClient extends Closeable with Logging { def execute(jobExecuteAction: JobExecuteAction): JobExecuteResult = executeUJESJob( jobExecuteAction @@ -37,7 +40,7 @@ abstract class UJESClient extends Closeable { def submit(jobSubmitAction: JobSubmitAction): JobSubmitResult = executeUJESJob(jobSubmitAction).asInstanceOf[JobSubmitResult] - protected[client] def executeUJESJob(ujesJobAction: UJESJobAction): Result + def executeUJESJob(ujesJobAction: UJESJobAction): Result private def executeJobExecIdAction[T]( jobExecuteResult: JobExecuteResult, @@ -52,12 +55,37 @@ abstract class UJESClient extends Closeable { executeUJESJob(jobExecIdAction).asInstanceOf[T] } + /** + * only get the status of the cache Task status should be based on getJobInfo + * @param jobExecuteResult + * @return + */ def status(jobExecuteResult: JobExecuteResult): JobStatusResult = executeJobExecIdAction(jobExecuteResult, JobServiceType.JobStatus) + /** + * IF exception return null progress result + * @param jobExecuteResult + * @return + */ def progress(jobExecuteResult: JobExecuteResult): JobProgressResult = - executeJobExecIdAction(jobExecuteResult, JobServiceType.JobProgress) - + Utils.tryCatch( + executeJobExecIdAction(jobExecuteResult, JobServiceType.JobProgress) + .asInstanceOf[JobProgressResult] + ) { t => + logger.warn("Failed to get progress, return empty progress.", t) + val result = new JobProgressResult + result.setProgress(0) + result + } + + /** + * If exception return null log + * @param jobExecuteResult + * @param fromLine + * @param size + * @return + */ def log(jobExecuteResult: JobExecuteResult, fromLine: Int, size: Int): JobLogResult = { val jobLogAction = JobLogAction .builder() @@ -66,13 +94,19 @@ abstract class 
UJESClient extends Closeable { .setFromLine(fromLine) .setSize(size) .build() - executeUJESJob(jobLogAction).asInstanceOf[JobLogResult] - } - def list(jobListAction: JobListAction): JobListResult = { - executeUJESJob(jobListAction).asInstanceOf[JobListResult] + Utils.tryCatch(executeUJESJob(jobLogAction).asInstanceOf[JobLogResult]) { t => + logger.warn("Failed to get Log, return empty log.", t) + null + } } + /** + * If exception return null log + * @param jobExecuteResult + * @param jobLogResult + * @return + */ def log(jobExecuteResult: JobExecuteResult, jobLogResult: JobLogResult): JobLogResult = { val jobLogAction = JobLogAction .builder() @@ -80,13 +114,21 @@ abstract class UJESClient extends Closeable { .setUser(jobExecuteResult.getUser) .setFromLine(jobLogResult.getFromLine) .build() - executeUJESJob(jobLogAction).asInstanceOf[JobLogResult] + + Utils.tryCatch(executeUJESJob(jobLogAction).asInstanceOf[JobLogResult]) { t => + logger.warn("Failed to get Log, return empty log.", t) + null + } } def openLog(openLogAction: OpenLogAction): OpenLogResult = { executeUJESJob(openLogAction).asInstanceOf[OpenLogResult] } + def list(jobListAction: JobListAction): JobListResult = { + executeUJESJob(jobListAction).asInstanceOf[JobListResult] + } + def kill(jobExecuteResult: JobExecuteResult): JobKillResult = executeJobExecIdAction(jobExecuteResult, JobServiceType.JobKill) @@ -141,6 +183,13 @@ abstract class UJESClient extends Closeable { executeUJESJob(jobDeleteObserveAction).asInstanceOf[JobDeleteObserveResult] } + def getEncryptToken(token: String, user: String): String = { + val encryptTokenResult = executeUJESJob( + EncryptTokenAction.newBuilder().setUser(user).setToken(token).build() + ).asInstanceOf[EncryptTokenResult] + encryptTokenResult.encryptToken + } + } object UJESClient { diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClientImpl.scala 
b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClientImpl.scala index b173f53d551..0feabaafda6 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClientImpl.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClientImpl.scala @@ -26,7 +26,7 @@ import org.apache.linkis.ujes.client.request.UJESJobAction class UJESClientImpl(clientConfig: DWSClientConfig) extends UJESClient { private val dwsHttpClient = new DWSHttpClient(clientConfig, "Linkis-Job-Execution-Thread") - override protected[client] def executeUJESJob(ujesJobAction: UJESJobAction): Result = + override def executeUJESJob(ujesJobAction: UJESJobAction): Result = ujesJobAction match { case action: Action => dwsHttpClient.execute(action) } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/CreateNewDirAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/CreateNewDirAction.scala new file mode 100644 index 00000000000..561bfc07d12 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/CreateNewDirAction.scala @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ujes.client.request + +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.request.POSTAction +import org.apache.linkis.ujes.client.exception.UJESClientBuilderException + +class CreateNewDirAction extends POSTAction with UJESJobAction { + + override def suffixURLs: Array[String] = Array("filesystem", "createNewDir") + + override def getRequestPayload: String = + DWSHttpClient.jacksonJson.writeValueAsString(getRequestPayloads) + +} + +object CreateNewDirAction { + def builder(): Builder = new Builder + + class Builder private[CreateNewDirAction] () { + private var user: String = _ + private var path: String = _ + + def setUser(user: String): Builder = { + this.user = user + this + } + + def setPath(path: String): Builder = { + this.path = path + this + } + + def build(): CreateNewDirAction = { + val makeDirAction = new CreateNewDirAction + if (user == null) throw new UJESClientBuilderException("user is needed!") + if (path == null) throw new UJESClientBuilderException("path is needed!") + makeDirAction.setUser(user) + makeDirAction.addRequestPayload("path", path) + makeDirAction + } + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/EncryptTokenAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/EncryptTokenAction.scala new file mode 100644 index 00000000000..71b9db3b57b --- /dev/null +++ 
b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/EncryptTokenAction.scala @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ujes.client.request + +import org.apache.linkis.httpclient.request.GetAction +import org.apache.linkis.ujes.client.exception.UJESClientBuilderException + +import org.apache.commons.lang3.StringUtils + +class EncryptTokenAction extends GetAction with UJESJobAction { + + override def suffixURLs: Array[String] = + Array("basedata-manager", "gateway-auth-token", "encrypt-token") + +} + +object EncryptTokenAction { + def newBuilder(): Builder = new Builder + + class Builder private[EncryptTokenAction] () { + private var user: String = _ + private var token: String = _ + + def setToken(token: String): Builder = { + this.token = token + this + } + + def setUser(user: String): Builder = { + this.user = user + this + } + + def build(): EncryptTokenAction = { + val EncryptTokenAction = new EncryptTokenAction + if (token == null) throw new UJESClientBuilderException("token is needed!") + if (StringUtils.isNotBlank(token)) EncryptTokenAction.setParameter("token", token) + if 
(StringUtils.isNotBlank(user)) EncryptTokenAction.setUser(user) + EncryptTokenAction + } + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/IsPathExistAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/IsPathExistAction.scala new file mode 100644 index 00000000000..e9e74edd16b --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/IsPathExistAction.scala @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ujes.client.request + +import org.apache.linkis.httpclient.request.GetAction +import org.apache.linkis.ujes.client.exception.UJESClientBuilderException + +class IsPathExistAction extends GetAction with UJESJobAction { + + override def suffixURLs: Array[String] = Array("filesystem", "isExist") +} + +object IsPathExistAction { + def builder(): Builder = new Builder + + class Builder private[IsPathExistAction] () { + private var user: String = _ + private var path: String = _ + + def setUser(user: String): Builder = { + this.user = user + this + } + + def setPath(path: String): Builder = { + this.path = path + this + } + + def build(): IsPathExistAction = { + val isPathExistAction = new IsPathExistAction + if (user == null) throw new UJESClientBuilderException("user is needed!") + if (path == null) throw new UJESClientBuilderException("path is needed!") + isPathExistAction.setUser(user) + isPathExistAction.setParameter("path", path) + isPathExistAction + } + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/JobSubmitAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/JobSubmitAction.scala index f96c6227fea..aba26c619f3 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/JobSubmitAction.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/JobSubmitAction.scala @@ -25,6 +25,8 @@ import org.apache.linkis.ujes.client.exception.UJESClientBuilderException import java.util +import scala.collection.JavaConverters.mapAsScalaMapConverter + class JobSubmitAction private () extends POSTAction with UJESJobAction { override def suffixURLs: Array[String] = Array("entrance", "submit") @@ -52,6 +54,8 @@ object 
JobSubmitAction { private var source: util.Map[String, AnyRef] = _ + private var headers: util.Map[String, String] = _ + def addExecuteCode(executeCode: String): Builder = { if (null == executionContent) executionContent = new util.HashMap[String, AnyRef]() executionContent.put("code", executeCode) @@ -129,6 +133,11 @@ object JobSubmitAction { this } + def setHeaders(headers: util.Map[String, String]): Builder = { + this.headers = headers + this + } + def build(): JobSubmitAction = { val submitAction = new JobSubmitAction submitAction.setUser(user) @@ -145,6 +154,11 @@ object JobSubmitAction { if (this.labels == null) this.labels = new util.HashMap[String, AnyRef]() submitAction.addRequestPayload(TaskConstant.LABELS, this.labels) + + if (this.headers == null) this.headers = new util.HashMap[String, String]() + this.headers.asScala.foreach { case (k, v) => + if (k != null && v != null) submitAction.addHeader(k, v) + } submitAction } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala index 6b41b4c62b6..708689089a0 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala @@ -35,6 +35,13 @@ object ResultSetAction { private var pageSize: Int = _ private var charset: String = Configuration.BDP_ENCODING.getValue + // default value is :org.apache.linkis.storage.domain.Dolphin.LINKIS_NULL + private var nullValue: String = "LINKIS_NULL" + + private var enableLimit: Option[Boolean] = None + private var columnPage: Int = _ + private var columnPageSize: Int = _ + def setUser(user: 
String): Builder = { this.user = user this @@ -60,6 +67,26 @@ object ResultSetAction { this } + def setNullValue(nullValue: String): Builder = { + this.nullValue = nullValue + this + } + + def setEnableLimit(enableLimit: Boolean): Builder = { + this.enableLimit = Some(enableLimit) + this + } + + def setColumnPage(columnPage: Int): Builder = { + this.columnPage = columnPage + this + } + + def setColumnPageSize(columnPageSize: Int): Builder = { + this.columnPageSize = columnPageSize + this + } + def build(): ResultSetAction = { if (user == null) throw new UJESClientBuilderException("user is needed!") if (path == null) throw new UJESClientBuilderException("path is needed!") @@ -68,6 +95,18 @@ object ResultSetAction { if (page > 0) resultSetAction.setParameter("page", page) if (pageSize > 0) resultSetAction.setParameter("pageSize", pageSize) resultSetAction.setParameter("charset", charset) + if (enableLimit.isDefined) resultSetAction.setParameter("enableLimit", true) + resultSetAction.setParameter("nullValue", nullValue) + if (columnPage > 0) { + resultSetAction.setParameter("columnPage", columnPage) + } else { + resultSetAction.setParameter("columnPage", null) + } + if (columnPageSize > 0) { + resultSetAction.setParameter("columnPageSize", columnPageSize) + } else { + resultSetAction.setParameter("columnPageSize", null) + } resultSetAction.setUser(user) resultSetAction } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/UploadFileAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/UploadFileAction.scala new file mode 100644 index 00000000000..4248a9c7c6e --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/UploadFileAction.scala @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one 
or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ujes.client.request + +import org.apache.linkis.httpclient.request.{BinaryBody, GetAction, UploadAction} +import org.apache.linkis.ujes.client.exception.UJESClientBuilderException + +import org.apache.http.entity.ContentType + +import java.io.{File, FileInputStream} +import java.util + +import scala.collection.JavaConverters._ + +class UploadFileAction extends GetAction with UploadAction with UJESJobAction { + override def suffixURLs: Array[String] = Array("filesystem", "upload") + + override val files: util.Map[String, String] = new util.HashMap[String, String]() + + override val binaryBodies: util.List[BinaryBody] = new util.ArrayList[BinaryBody](0) + +} + +object UploadFileAction { + def builder(): Builder = new Builder + + class Builder private[UploadFileAction] { + private var user: String = _ + private var path: String = _ + private var uploadFiles: util.List[File] = new util.ArrayList[File](0) + + def setUser(user: String): Builder = { + this.user = user + this + } + + def setPath(path: String): Builder = { + this.path = path + this + } + + def addFile(file: File): Builder = { + this.uploadFiles.add(file) + this + } + + def build(): UploadFileAction = { + val uploadFileAction = new UploadFileAction 
+ if (user == null) throw new UJESClientBuilderException("user is needed!") + if (path == null) throw new UJESClientBuilderException("path is needed!") + + uploadFileAction.setUser(user) + uploadFileAction.setParameter("path", path) + uploadFiles.asScala.foreach { file => + println(String.format("=============== upload file ========== %s ", file.getAbsolutePath)) + uploadFileAction.binaryBodies.add( + BinaryBody + .apply("file", new FileInputStream(file), file.getName, ContentType.MULTIPART_FORM_DATA) + ) + } + + uploadFileAction + } + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/CreateNewDirResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/CreateNewDirResult.scala new file mode 100644 index 00000000000..0871f4042ee --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/CreateNewDirResult.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ujes.client.response + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult +import org.apache.linkis.ujes.client.request.UserAction + +@DWSHttpMessageResult("/api/rest_j/v\\d+/filesystem/createNewDir") +class CreateNewDirResult extends DWSResult with UserAction {} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/EncryptTokenResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/EncryptTokenResult.scala new file mode 100644 index 00000000000..98e49247d31 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/EncryptTokenResult.scala @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ujes.client.response + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult + +import java.util + +import scala.beans.BeanProperty + +@DWSHttpMessageResult("/api/rest_j/v\\d+/basedata-manager/gateway-auth-token/encrypt-token") +class EncryptTokenResult extends DWSResult { + + @BeanProperty + var encryptToken: String = _ + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/IsPathExistResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/IsPathExistResult.scala new file mode 100644 index 00000000000..c87cd7d2c7a --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/IsPathExistResult.scala @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Result of the filesystem `isExist` REST call.
 */
@DWSHttpMessageResult("/api/rest_j/v\\d+/filesystem/isExist")
class IsPathExistResult extends DWSResult with UserAction {
  // true when the queried path exists on the server-side filesystem
  @BeanProperty var isExist: Boolean = _
}
  /**
   * Stores the raw per-task progress maps and eagerly converts them into
   * typed JobProgressInfo objects.
   *
   * The conversion round-trips each map through JSON (map -> JSON string ->
   * JobProgressInfo) using the shared Jackson mapper.
   *
   * NOTE(review): assumes JsonUtils.jackson can serialize a
   * scala.collection.immutable.Map (i.e. a Scala module is registered on the
   * mapper) — confirm against JsonUtils' configuration.
   */
  def setProgressInfo(progressInfo: util.List[util.Map[String, AnyRef]]): Unit = {
    this.progressInfo = progressInfo
    progressInfos = progressInfo.asScala
      .map(map =>
        JsonUtils.jackson
          .readValue(
            JsonUtils.jackson.writeValueAsString(map.asScala.toMap),
            classOf[JobProgressInfo]
          )
      )
      .toArray
  }
a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/OpenLogResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/OpenLogResult.scala index fe107a32ca4..2de5758aeaa 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/OpenLogResult.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/OpenLogResult.scala @@ -26,7 +26,7 @@ import scala.beans.BeanProperty class OpenLogResult extends DWSResult { /** - * log[0] - info log[1] - warn log[2] - error log[3] - all (info + warn + error) + * log[0] - error log[1] - warn log[2] - info log[3] - all (info + warn + error) */ @BeanProperty var log: Array[String] = _ diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/ResultSetResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/ResultSetResult.scala index 973573f4945..1b629e05de2 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/ResultSetResult.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/ResultSetResult.scala @@ -20,6 +20,9 @@ package org.apache.linkis.ujes.client.response import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult import org.apache.linkis.httpclient.dws.response.DWSResult import org.apache.linkis.ujes.client.request.UserAction +import org.apache.linkis.ujes.client.utils.UJESClientUtils.evaluate + +import java.util import scala.beans.BeanProperty @@ -28,6 +31,35 @@ class ResultSetResult 
extends DWSResult with UserAction { private var `type`: String = _ + private var metadataList: util.List[util.Map[String, String]] = _ + + private var fileContentList: util.List[util.ArrayList[_]] = _ + + def getMetadataList: util.List[util.Map[String, String]] = { + metadata.asInstanceOf[util.List[util.Map[String, String]]] + } + + def getRowList: util.List[util.ArrayList[Any]] = { + val metaData = metadata.asInstanceOf[util.List[util.Map[String, String]]] + val fileContentList = fileContent.asInstanceOf[util.List[util.ArrayList[Any]]] + for (metaDataColnum <- 1 to metaData.size()) { + val col = metaData.get(metaDataColnum - 1) + if (!col.get("dataType").equals("string")) { + for (cursor <- 1 to fileContentList.size()) { + val colDataList = fileContentList.get(cursor - 1) + var colData = colDataList.get(metaDataColnum - 1) + if (null == colData) { + colData = null; + } else { + colData = evaluate(col.get("dataType"), colData.toString) + } + colDataList.set(metaDataColnum - 1, colData) + } + } + } + fileContentList + } + def setType(`type`: String): Unit = this.`type` = `type` def getType: String = `type` diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/UploadFileResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/UploadFileResult.scala new file mode 100644 index 00000000000..837399f2d9b --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/UploadFileResult.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
/**
 * Result of the filesystem `upload` REST call. No payload fields; the
 * inherited DWSResult carries status and message.
 */
@DWSHttpMessageResult("/api/rest_j/v\\d+/filesystem/upload")
class UploadFileResult extends DWSResult with UserAction {}
val gson: Gson = new Gson() + def toEngineType(engineType: String): EngineType = engineType match { case "spark" => EngineType.SPARK case "hive" => EngineType.HIVE @@ -48,4 +57,31 @@ object UJESClientUtils { case _ => EngineType.SPARK.SQL } + def evaluate(dataType: String, value: String): Any = { + if (value == null || value.equals("null") || value.equals("NULL") || value.equals("Null")) { + dataType.toLowerCase(Locale.getDefault) match { + case "string" | "char" | "varchar" | "nvarchar" => value + case _ => null + } + } else { + dataType.toLowerCase(Locale.getDefault) match { + case null => throw new UJESClientBuilderException("data is empty") + case "char" | "varchar" | "nvarchar" | "string" => value + case "short" => value.toShort + case "int" => value.toInt + case "long" => value.toLong + case "float" => value.toFloat + case "double" => value.toDouble + case "boolean" => value.toBoolean + case "byte" => value.toByte + case "bigint" => value.toLong + case "decimal" => value.toDouble + case "array" => gson.fromJson(value, classOf[util.ArrayList[Object]]) + case "map" => gson.fromJson(value, classOf[util.HashMap[Object, Object]]) + case "struct" => gson.fromJson(value, classOf[JsonObject]) + case _ => value + } + } + } + } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/response/JobInfoResultTest.java b/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/response/JobInfoResultTest.java new file mode 100644 index 00000000000..4354db07d43 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/response/JobInfoResultTest.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
/** Unit tests for {@code JobInfoResult#getResultSetList(UJESClient)} ordering behavior. */
class JobInfoResultTest {

  /** verify single path returns check point 1: return one path */
  @Test
  void shouldReturnResultSetWithOrder() {
    String[] toBeReturned = new String[] {"hdfs://hdfs/path/test/mockFile_1.dolphi"};
    String[] setList = getResultSetList(toBeReturned);
    assertEquals(1, setList.length);
    assertEquals("hdfs://hdfs/path/test/mockFile_1.dolphi", setList[0]);
  }

  /** verify empty path set check point 1: return empty path */
  @Test
  void shouldReturnEmptyResultSet() {
    String[] toBeReturned = new String[] {};
    String[] setList = getResultSetList(toBeReturned);
    assertEquals(0, setList.length);
  }

  /**
   * verify multiple result set, sorted by file name with numbers check point 1: sort asc check
   * point 2: sort by number, not ascii
   */
  @Test
  void shouldReturnMultiResultSetWithOrder() {
    String[] toBeReturned =
        new String[] {
          "/path/to/xxxx_1.txt",
          "/some/path/xxxx_10.txt",
          "/another/path/xxxx_0.txt",
          "/another/path/xxxx_2.txt",
          "/yet/another/path/xxxx_3.txt",
        };
    String[] setList = getResultSetList(toBeReturned);
    assertIterableEquals(
        Lists.newArrayList(
            "/another/path/xxxx_0.txt",
            "/path/to/xxxx_1.txt",
            "/another/path/xxxx_2.txt",
            "/yet/another/path/xxxx_3.txt",
            "/some/path/xxxx_10.txt"),
        Lists.newArrayList(setList));
  }

  /**
   * Drives getResultSetList with a mocked UJES client: stubs the job as
   * succeeded, injects a fake persist task, and makes the client return the
   * given raw result-set paths so only the sorting logic is under test.
   */
  private static String[] getResultSetList(String[] toBeReturned) {
    JobInfoResult jobInfoResult = Mockito.spy(new JobInfoResult());

    UJESClient ujesClient = Mockito.mock(UJESClient.class);
    Mockito.doReturn("Succeed").when(jobInfoResult).getJobStatus();
    RequestPersistTask persistTask = new RequestPersistTask();
    persistTask.setUmUser("test");
    persistTask.setResultLocation("mockPath");
    Mockito.doReturn(persistTask).when(jobInfoResult).getRequestPersistTask();

    ResultSetListResult t = Mockito.spy(new ResultSetListResult());
    Mockito.when(ujesClient.executeUJESJob(any())).thenReturn(t);
    Mockito.doReturn(toBeReturned).when(t).getResultSetList();

    return jobInfoResult.getResultSetList(ujesClient);
  }
}
/** Constants for code snippets appended by the governance layer. */
public class CodeConstants {
  // Will be auto-appended at the end of submitted Scala code so the last
  // statement is a value definition; make sure the last line is not a comment.
  // Fix vs. original: declared final — a mutable public static constant could
  // be reassigned at runtime by any caller.
  public static final String SCALA_CODE_AUTO_APPEND_CODE = "val linkisVar=123";
}
/**
 * One key/value entry of a template configuration, identified by the owning
 * template's UUID and name.
 */
public class TemplateConfKey {

  // UUID of the template this key belongs to
  private String templateUuid;

  // configuration key name
  private String key;

  // human-readable template name
  private String templateName;

  // configured value for this key
  private String configValue;

  public String getTemplateUuid() {
    return templateUuid;
  }

  public void setTemplateUuid(String templateUuid) {
    this.templateUuid = templateUuid;
  }

  public String getKey() {
    return key;
  }

  public void setKey(String key) {
    this.key = key;
  }

  public String getTemplateName() {
    return templateName;
  }

  public void setTemplateName(String templateName) {
    this.templateName = templateName;
  }

  public String getConfigValue() {
    return configValue;
  }

  public void setConfigValue(String configValue) {
    this.configValue = configValue;
  }

  // NOTE(review): prints "TemplateKey" rather than the class name
  // "TemplateConfKey" — runtime string kept as-is; confirm whether the
  // mismatch is intentional.
  @Override
  public String toString() {
    return "TemplateKey{"
        + "templateUuid='"
        + templateUuid
        + '\''
        + ", key='"
        + key
        + '\''
        + ", templateName='"
        + templateName
        + '\''
        + ", configValue='"
        + configValue
        + '\''
        + '}';
  }
}
/**
 * Entity mirroring the linkis_ps_ai_job_history table.
 *
 * NOTE(review): generic type parameters appear to have been lost in text
 * extraction (e.g. {@code Map metrics}, {@code List> labels}); restore the
 * original generics (likely {@code Map<String, Object>} and
 * {@code List<Label<?>>}) from version control before relying on this copy.
 */
public class JobAiRequest {

  private Long id;
  private String jobHistoryId;
  private String submitUser;
  private String executeUser;
  private String submitCode;
  private String executionCode;
  private Map metrics = new HashMap<>();
  private Map params;
  private List> labels;
  private Integer errorCode;
  private String errorDesc;
  private String engineInstances;
  private String engineType;
  private Date changeTime;
  private Date createdTime;
  private Date updatedTime;

  public Long getId() {
    return id;
  }

  public void setId(Long id) {
    this.id = id;
  }

  public String getJobHistoryId() {
    return jobHistoryId;
  }

  public void setJobHistoryId(String jobHistoryId) {
    this.jobHistoryId = jobHistoryId;
  }

  public String getSubmitUser() {
    return submitUser;
  }

  public void setSubmitUser(String submitUser) {
    this.submitUser = submitUser;
  }

  public String getExecuteUser() {
    return executeUser;
  }

  public void setExecuteUser(String executeUser) {
    this.executeUser = executeUser;
  }

  public String getSubmitCode() {
    return submitCode;
  }

  public void setSubmitCode(String submitCode) {
    this.submitCode = submitCode;
  }

  public String getExecutionCode() {
    return executionCode;
  }

  public void setExecutionCode(String executionCode) {
    this.executionCode = executionCode;
  }

  public Map getMetrics() {
    return metrics;
  }

  public void setMetrics(Map metrics) {
    this.metrics = metrics;
  }

  public Map getParams() {
    return params;
  }

  public void setParams(Map params) {
    this.params = params;
  }

  public List> getLabels() {
    return labels;
  }

  public void setLabels(List> labels) {
    this.labels = labels;
  }

  public Integer getErrorCode() {
    return errorCode;
  }

  public void setErrorCode(Integer errorCode) {
    this.errorCode = errorCode;
  }

  public String getErrorDesc() {
    return errorDesc;
  }

  public void setErrorDesc(String errorDesc) {
    this.errorDesc = errorDesc;
  }

  public String getEngineInstances() {
    return engineInstances;
  }

  public void setEngineInstances(String engineInstances) {
    this.engineInstances = engineInstances;
  }

  public String getEngineType() {
    return engineType;
  }

  public void setEngineType(String engineType) {
    this.engineType = engineType;
  }

  public Date getChangeTime() {
    return changeTime;
  }

  public void setChangeTime(Date changeTime) {
    this.changeTime = changeTime;
  }

  public Date getCreatedTime() {
    return createdTime;
  }

  public void setCreatedTime(Date createdTime) {
    this.createdTime = createdTime;
  }

  public Date getUpdatedTime() {
    return updatedTime;
  }

  public void setUpdatedTime(Date updatedTime) {
    this.updatedTime = updatedTime;
  }

  // Intentionally omits code/metrics fields to keep log lines short.
  @Override
  public String toString() {
    return "JobAiRequest{"
        + "id="
        + id
        + ", jobHistoryId='"
        + jobHistoryId
        + '\''
        + ", submitUser='"
        + submitUser
        + '\''
        + ", executeUser='"
        + executeUser
        + '\''
        + ", labels="
        + labels
        + ", params="
        + params
        + '}';
  }
}
/** Scope boundary for once-job operations: ECM-level or EC-level. */
public enum OnceJobOperationBoundary {
  ECM("ecm"),
  EC("ec");

  // Wire/display name of the boundary.
  // Fix vs. original: declared final — enum constant state should be
  // immutable once constructed.
  private final String name;

  OnceJobOperationBoundary(String name) {
    this.name = name;
  }

  public String getName() {
    return name;
  }
}
/**
 * RPC request asking the configuration service for a template's conf keys,
 * addressed by UUID and optionally by name.
 */
public class TemplateConfRequest implements RequestProtocol {

  // UUID of the template whose configuration is requested
  private String templateUuid;

  // optional template name (alternative lookup key); null when the
  // single-argument constructor is used
  private String templateName;

  public TemplateConfRequest(String templateUuid, String templateName) {
    this.templateUuid = templateUuid;
    this.templateName = templateName;
  }

  public TemplateConfRequest(String templateUuid) {
    this.templateUuid = templateUuid;
  }

  public String getTemplateUuid() {
    return templateUuid;
  }

  public void setTemplateUuid(String templateUuid) {
    this.templateUuid = templateUuid;
  }

  public String getTemplateName() {
    return templateName;
  }

  public void setTemplateName(String templateName) {
    this.templateName = templateName;
  }
}
/**
 * RPC response carrying a template's configuration keys; never null,
 * defaults to an empty list.
 *
 * NOTE(review): the element type parameter appears to have been lost in text
 * extraction ({@code List list}); original is presumably
 * {@code List<TemplateConfKey>} — confirm against version control.
 */
public class TemplateConfResponse {

  private List list = new ArrayList<>();

  public List getList() {
    return list;
  }

  public void setList(List list) {
    this.list = list;
  }
}
/** Helpers for tagging log output with the current job id via the SLF4J MDC. */
public class LoggerUtils {

  /** Puts the given job id into the MDC unconditionally. */
  public static void setJobIdMDC(String jobId) {
    MDC.put(JobRequestConstants.JOB_ID(), jobId);
  }

  /**
   * Extracts the job id from the job properties and puts it into the MDC.
   *
   * NOTE(review): only this overload honors
   * GovernanceCommonConf.MDC_ENABLED; the String overload above always
   * writes — confirm the asymmetry is intentional.
   */
  public static void setJobIdMDC(Map props) {
    if (GovernanceCommonConf.MDC_ENABLED()) {
      String jobId = JobUtils.getJobIdFromMap(props);
      MDC.put(JobRequestConstants.JOB_ID(), jobId);
    }
  }

  /** Clears the job id from the MDC; call in a finally block after setJobIdMDC. */
  public static void removeJobIdMDC() {
    MDC.remove(JobRequestConstants.JOB_ID());
  }
}
b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala @@ -18,6 +18,7 @@ package org.apache.linkis.governance.common.conf import org.apache.linkis.common.conf.{CommonVars, Configuration} +import org.apache.linkis.governance.common.constant.ec.ECConstants object GovernanceCommonConf { @@ -40,10 +41,16 @@ object GovernanceCommonConf { val ENGINE_CONN_MANAGER_SPRING_NAME = CommonVars("wds.linkis.engineconn.manager.name", "linkis-cg-engineconnmanager") + val ENGINE_APPLICATION_MANAGER_SPRING_NAME = + CommonVars("wds.linkis.application.manager.name", "linkis-cg-linkismanager") + val ENGINE_CONN_PORT_RANGE = CommonVars("wds.linkis.engineconn.port.range", "-") val MANAGER_SERVICE_NAME = - CommonVars("wds.linkis.engineconn.manager.name", "linkis-cg-linkismanager") + CommonVars( + "wds.linkis.engineconn.manager.name", + GovernanceCommonConf.ENGINE_APPLICATION_MANAGER_SPRING_NAME.getValue + ) val ENTRANCE_SERVICE_NAME = CommonVars("wds.linkis.entrance.name", "linkis-cg-entrance") @@ -66,8 +73,26 @@ object GovernanceCommonConf { val ERROR_CODE_DESC_LEN = CommonVars("linkis.error.code.desc.len", 512, "Error code description maximum length").getValue + val FAKE_PROGRESS: Float = CommonVars[Float]("linkis.job.fake.progress", 0.99f).getValue + + val MDC_ENABLED = + CommonVars("linkis.mdc.log.enabled", true, "MDC Switch").getValue + def getEngineEnvValue(envKey: String): String = { CommonVars(envKey, "").getValue } + // value ECConstants.EC_CLIENT_TYPE_ATTACH + val EC_APP_MANAGE_MODE = + CommonVars("linkis.ec.app.manage.mode", "attach") + + /** + * DEFAULT_LOGPATH_PREFIX is the prefix that represents the default log storage path + * DEFAULT_LOGPATH_PREFIX 是表示默认的日志存储路径的前缀 和 结果集的前缀 + */ + val DEFAULT_LOGPATH_PREFIX = CommonVars[String]( + "wds.linkis.entrance.config.log.path", + CommonVars[String]("wds.linkis.filesystem.hdfs.root.path").getValue + ).getValue + } diff --git 
a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala index fe48f6887dc..c418201f439 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala @@ -35,4 +35,47 @@ object ECConstants { val YARN_QUEUE_NAME_CONFIG_KEY = "wds.linkis.rm.yarnqueue" + val QUEUE = "queue" + + val EC_CLIENT_TYPE_ATTACH = "attach" + + val EC_CLIENT_TYPE_DETACH = "detach" + + val YARN_APPID_NAME_KEY = "applicationId" + + val YARN_APP_URL_KEY = "applicationUrl" + + val YARN_APP_NAME_KEY = "appicationName" + + val YARN_MODE_KEY = "yarnMode" + + val EC_SERVICE_INSTANCE_KEY = "serviceInstance" + + val ECM_SERVICE_INSTANCE_KEY = "ecmServiceInstance" + + val MANAGER_SERVICE_INSTANCE_KEY = "managerServiceInstance" + + val NODE_STATUS_KEY = "nodeStatus" + + val EC_LAST_UNLOCK_TIMESTAMP = "lastUnlockTimestamp" + + val YARN_APP_TYPE_LIST_KEY = "yarnAppTypeList" + + val YARN_APP_STATE_LIST_KEY = "yarnAppStateList" + + val YARN_APP_TYPE_KEY = "yarnAppType" + + val YARN_APP_TYPE_SPARK = "spark" + + val YARN_APP_TYPE_FLINK = "flink" + + val EC_OPERATE_LIST = "list" + + val EC_OPERATE_STATUS = "status" + + val YARN_APP_RESULT_LIST_KEY = "yarnAppResultList" + + val HIVE_OPTS = "HIVE_OPTS" + + val SPARK_SUBMIT_OPTS = "SPARK_SUBMIT_OPTS" } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala 
b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala index 8741c4297f2..73fa68b1752 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala @@ -17,11 +17,6 @@ package org.apache.linkis.governance.common.constant.job -/** - * @date - * 2021/3/17 - * @description - */ object JobRequestConstants { val JOB_ID = "jobId" @@ -34,4 +29,10 @@ object JobRequestConstants { val JOB_DETAIL_LIST = "jobDetailList" + val JOB_SOURCE_TAGS = "job.source.tags" + + val LINKIS_JDBC_DEFAULT_DB = "linkis.jdbc.default.db" + + val LINKIS_HIVE_EC_READ_RESULT_BY_OBJECT = "readResByObject" + } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala index 19bd7f9cdb1..d5669ad428d 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala @@ -19,6 +19,7 @@ package org.apache.linkis.governance.common.paser import org.apache.linkis.common.utils.{CodeAndRunTypeUtils, Logging, Utils} import org.apache.linkis.governance.common.conf.GovernanceCommonConf +import org.apache.linkis.governance.common.constant.CodeConstants import org.apache.linkis.governance.common.paser.CodeType.CodeType import org.apache.commons.lang3.StringUtils @@ -86,6 +87,11 @@ 
abstract class CombinedEngineCodeParser extends CodeParser { } +/** + * Scala is no longer using Parser but instead using EmptyParser. If there is a comment at the end, + * it will cause the task to become stuck + */ +@deprecated class ScalaCodeParser extends SingleCodeParser with Logging { override val codeType: CodeType = CodeType.Scala @@ -109,6 +115,9 @@ class ScalaCodeParser extends SingleCodeParser with Logging { case _ => } if (statementBuffer.nonEmpty) codeBuffer.append(statementBuffer.mkString("\n")) + // Make sure the last line is not a comment + codeBuffer.append("\n") + codeBuffer.append(CodeConstants.SCALA_CODE_AUTO_APPEND_CODE) codeBuffer.toArray } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/AcrossClusterConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/AcrossClusterConf.scala new file mode 100644 index 00000000000..43d3c86b134 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/AcrossClusterConf.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.protocol.conf + +import org.apache.linkis.protocol.message.RequestProtocol + +trait AcrossClusterConf extends RequestProtocol + +case class AcrossClusterRequest(username: String) extends AcrossClusterConf + +case class AcrossClusterResponse(clusterName: String, queueName: String) diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/DepartmentConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/DepartmentConf.scala new file mode 100644 index 00000000000..dbfe3f7b741 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/DepartmentConf.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.governance.common.protocol.conf + +import org.apache.linkis.protocol.message.RequestProtocol + +trait DepartmentConf extends RequestProtocol + +case class DepartmentRequest(user: String) extends DepartmentConf + +case class DepartmentResponse(user: String, departmentId: String, departmentName: String) diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/TenantConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/TenantConf.scala index e8d129453ee..17393bd04c0 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/TenantConf.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/conf/TenantConf.scala @@ -23,4 +23,15 @@ trait TenantConf extends RequestProtocol case class TenantRequest(user: String, creator: String) extends TenantConf -case class TenantResponse(user: String, creator: String, tenant: String) +case class TenantResponse(user: String, creator: String, isValid: String, tenant: String) + +case class DepartTenantRequest(creator: String, departmentId: String, departmentName: String) + extends TenantConf + +case class DepartTenantResponse( + creator: String, + departmentId: String, + departmentName: String, + isValid: String, + tenant: String +) diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala index 2e447397872..9635aaf0c2c 100644 --- 
a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala @@ -17,7 +17,8 @@ package org.apache.linkis.governance.common.protocol.job -import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.entity.job.{JobAiRequest, JobRequest} +import org.apache.linkis.protocol.RetryableProtocol import org.apache.linkis.protocol.message.RequestProtocol import java.util @@ -25,7 +26,7 @@ import java.util.Date import scala.beans.BeanProperty -trait JobReq extends RequestProtocol +trait JobReq extends RequestProtocol with RetryableProtocol case class JobReqInsert(jobReq: JobRequest) extends JobReq @@ -37,6 +38,8 @@ case class JobReqQuery(jobReq: JobRequest) extends JobReq case class JobReqReadAll(jobReq: JobRequest) extends JobReq +case class JobAiReqInsert(jobReq: JobAiRequest) extends JobReq + class RequestOneJob extends JobReq { @BeanProperty @@ -51,3 +54,10 @@ class RequestOneJob extends JobReq { } case class RequestAllJob(instance: String) extends JobReq + +case class RequestFailoverJob( + reqMap: util.Map[String, java.lang.Long], + statusList: util.List[String], + startTimestamp: Long, + limit: Int = 10 +) extends JobReq diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala index b136c61099e..95f1a542ac6 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala +++ 
b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala @@ -24,14 +24,11 @@ import org.apache.linkis.protocol.message.RequestProtocol import java.util -case class ResponseTaskExecute(execId: String) - case class ResponseTaskProgress( execId: String, progress: Float, progressInfo: Array[JobProgressInfo] -) extends RetryableProtocol - with RequestProtocol +) extends RequestProtocol case class ResponseEngineLock(lock: String) @@ -42,34 +39,7 @@ case class EngineConcurrentInfo( failedTasks: Int ) -case class EngineOverloadInfo(maxMemory: Long, usedMemory: Long, systemCPUUsed: Float) - -case class ResponseEngineStatusChanged( - instance: String, - fromStatus: ExecutionNodeStatus, - toStatus: ExecutionNodeStatus, - overload: EngineOverloadInfo, - concurrent: EngineConcurrentInfo -) extends BroadcastProtocol - -case class ResponseEngineInfo( - createEntranceInstance: String, - creator: String, - user: String, - properties: util.Map[String, String] -) - -case class ResponseEngineStatus( - instance: String, - status: ExecutionNodeStatus, - overload: EngineOverloadInfo, - concurrent: EngineConcurrentInfo, - engineInfo: ResponseEngineInfo -) - -case class ResponseTaskLog(execId: String, log: String) - extends RetryableProtocol - with RequestProtocol +case class ResponseTaskLog(execId: String, log: String) extends RequestProtocol case class ResponseTaskError(execId: String, errorMsg: String) extends RetryableProtocol @@ -79,6 +49,15 @@ case class ResponseTaskStatus(execId: String, status: ExecutionNodeStatus) extends RetryableProtocol with RequestProtocol +class ResponseTaskStatusWithExecuteCodeIndex( + execId: String, + status: ExecutionNodeStatus, + private var _errorIndex: Int = -1 +) extends ResponseTaskStatus(execId, status) { + def errorIndex: Int = _errorIndex + def errorIndex_=(value: Int): Unit = _errorIndex = value +} + case class ResponseTaskResultSet(execId: 
String, output: String, alias: String) extends RetryableProtocol with RequestProtocol { diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala index 58c08b1f847..236046f3d4a 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala @@ -17,6 +17,8 @@ package org.apache.linkis.governance.common.utils +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} + import org.apache.commons.lang3.StringUtils import org.apache.commons.lang3.time.DateFormatUtils @@ -45,4 +47,20 @@ object ECPathUtils { suffix + File.separator + ticketId } + def getECLogDirSuffix( + engineTypeLabel: EngineTypeLabel, + userCreatorLabel: UserCreatorLabel, + ticketId: String + ): String = { + if (null == engineTypeLabel || null == userCreatorLabel) { + return "" + } + val suffix = ECPathUtils.getECWOrkDirPathSuffix( + userCreatorLabel.getUser, + ticketId, + engineTypeLabel.getEngineType + ) + suffix + File.separator + "logs" + } + } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala index 684dd371a17..31d9b1e9b2c 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala +++ 
b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala @@ -145,9 +145,13 @@ class DefaultEngineConnArgumentsParser extends EngineConnArgumentsParser { val options = ArrayBuffer[String]() def write(confMap: Map[String, String], optionType: String): Unit = confMap.foreach { case (key, value) => - if (StringUtils.isNotEmpty(key) && StringUtils.isNotEmpty(value)) { + var realValue = value + if (key.startsWith("label") && StringUtils.isEmpty(realValue)) { + realValue = "true" + } + if (StringUtils.isNotEmpty(key) && StringUtils.isNotEmpty(realValue)) { options += optionType - options += (key + "=" + value) + options += (key + "=" + realValue) } } write(engineConnArguments.getEngineConnConfMap, ENGINE_CONN_CONF) diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceConstant.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceConstant.scala index 52e7802164b..54927a84dfd 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceConstant.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceConstant.scala @@ -31,5 +31,4 @@ object GovernanceConstant { val REQUEST_ENGINE_STATUS_BATCH_LIMIT = 500 - def RESULTSET_INDEX: String = "resultsetIndex" } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala index 43fd598f71f..3da093558fd 100644 --- 
a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala @@ -24,10 +24,14 @@ import org.apache.linkis.governance.common.conf.GovernanceCommonConf import org.apache.commons.lang3.StringUtils import java.io.File +import java.text.SimpleDateFormat import java.util +import java.util.Date object GovernanceUtils extends Logging { + val LINKIS_DEFAULT_RES_CREATOR = "linkis_default" + def killProcess(pid: String, desc: String, isSudo: Boolean): Unit = { val subProcessKillScriptPath = Configuration.getLinkisHome() + "/sbin/kill-process-by-pid.sh" if ( @@ -72,4 +76,93 @@ object GovernanceUtils extends Logging { } } + def killECProcessByPort(port: String, desc: String, isSudo: Boolean): Unit = { + val subProcessKillScriptPath = + Configuration.getLinkisHome() + "/sbin/kill-ec-process-by-port.sh" + if ( + StringUtils.isBlank(subProcessKillScriptPath) || !new File(subProcessKillScriptPath) + .exists() + ) { + logger.error(s"Failed to locate kill-script, $subProcessKillScriptPath not exist") + } else if (StringUtils.isNotBlank(port)) { + val cmd = if (isSudo) { + Array("sudo", "sh", subProcessKillScriptPath, port) + } else { + Array("sh", subProcessKillScriptPath, port) + } + logger.info( + s"Starting to kill process and sub-processes. desc: $desc Kill Command: " + cmd + .mkString(" ") + ) + + Utils.tryCatch { + val output = Utils.exec(cmd, 600 * 1000L) + logger.info(s"Kill Success! desc: $desc. msg:\n ${output}") + } { t => + logger.error(s"Kill error! 
desc: $desc.", t) + } + } + } + + /** + * find process id by port number + * @param processPort + * @return + */ + def findProcessIdentifier(processPort: String): String = { + val findCmd = + "sudo netstat -tunlp | grep :" + processPort + " | awk '{print $7}' | awk -F/ '{print $1}'" + val cmdList = new util.ArrayList[String] + cmdList.add("bash") + cmdList.add("-c") + cmdList.add(findCmd) + try Utils.exec(cmdList.toArray(new Array[String](0)), 5000L) + catch { + case e: Exception => + logger.warn("Method findPid failed, " + e.getMessage) + null + } + } + + /** + * get result path parentPath: resPrefix + dateStr + result + creator subPath: parentPath + + * executeUser + taskid + filename + * + * @param creator + * @return + */ + def getResultParentPath(creator: String): String = { + val resPrefix = GovernanceCommonConf.DEFAULT_LOGPATH_PREFIX + val resStb = new StringBuilder() + if (resStb.endsWith("/")) { + resStb.append(resPrefix) + } else { + resStb.append(resPrefix).append("/") + } + val dateFormat = new SimpleDateFormat("yyyy-MM-dd") + val hourFormat = new SimpleDateFormat("HH") // 新增:24小时制 + val date = new Date(System.currentTimeMillis) + val dateString = dateFormat.format(date) + val hourString = hourFormat.format(date) // 新增:当前小时(如 "08", "14") + if (Configuration.HDFS_HOUR_DIR_SWITCH) { + resStb + .append("result") + .append("/") + .append(dateString) + .append("/") + .append(hourString) + .append("/") // 新增:小时层级 + .append(creator) + .toString() + } else { + resStb + .append("result") + .append("/") + .append(dateString) + .append("/") + .append(creator) + .toString() + } + } + } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/JobUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/JobUtils.scala index c55db3d621d..8c6522cdbb6 100644 --- 
a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/JobUtils.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/JobUtils.scala @@ -43,4 +43,24 @@ object JobUtils { null } + def getJobSourceTagsFromStringMap(map: util.Map[String, String]): String = { + if (null != map && map.containsKey(JobRequestConstants.JOB_SOURCE_TAGS)) { + val value = map.get(JobRequestConstants.JOB_SOURCE_TAGS) + if (null != value) { + return value + } + } + null + } + + def getJobSourceTagsFromObjectMap(map: util.Map[String, Object]): String = { + if (null != map && map.containsKey(JobRequestConstants.JOB_SOURCE_TAGS)) { + val value = map.get(JobRequestConstants.JOB_SOURCE_TAGS) + if (null != value) { + return value.toString + } + } + null + } + } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala index 3cfe787f615..04adf3446cb 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala @@ -29,7 +29,7 @@ class ScalaCodeParserTest { "val codeBuffer = new ArrayBuffer[String]()\n val statementBuffer = new ArrayBuffer[String]()" val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(scalaCode) - Assertions.assertTrue(array.size == 1) + Assertions.assertTrue(array.size == 3) } @@ -41,7 +41,7 @@ class ScalaCodeParserTest { " def addInt( a:Int, b:Int )\n var sum:Int = 0\n sum = a + b\n return sum\n }" val 
scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(abnormalCode) - Assertions.assertTrue(array.length == 1) + Assertions.assertTrue(array.length == 3) } @@ -54,7 +54,7 @@ class ScalaCodeParserTest { val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(importCode) - Assertions.assertTrue(array.length == 2) + Assertions.assertTrue(array.length == 4) } @@ -68,7 +68,7 @@ class ScalaCodeParserTest { val scalaCodeParser = new ScalaCodeParser val arrayResult1 = scalaCodeParser.parse(specialCodeExp1) - Assertions.assertTrue(arrayResult1.length == 2) + Assertions.assertTrue(arrayResult1.length == 4) val specialCodeExp2 = " @BeanProperty\n var id: Long = _\n @BeanProperty\n var status: Int = 0\n " + @@ -79,7 +79,7 @@ class ScalaCodeParserTest { ".append(data, that.data)\n .isEquals\n }" val arrayResult2 = scalaCodeParser.parse(specialCodeExp2) - Assertions.assertTrue(arrayResult2.length == 1) + Assertions.assertTrue(arrayResult2.length == 3) } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/utils/GovernanceConstantTest.scala b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/utils/GovernanceConstantTest.scala index 891d43c8b72..22f3cee2332 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/utils/GovernanceConstantTest.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/utils/GovernanceConstantTest.scala @@ -31,7 +31,6 @@ class GovernanceConstantTest { val taskresourceversionstr = GovernanceConstant.TASK_RESOURCE_VERSION_STR val taskresourcefilenamestr = GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR val requestenginestatusbatchlimit = GovernanceConstant.REQUEST_ENGINE_STATUS_BATCH_LIMIT - val resultsetindex = 
GovernanceConstant.RESULTSET_INDEX Assertions.assertEquals("source", tasksourcemapkey) Assertions.assertEquals("resources", taskresourcesstr) @@ -39,7 +38,6 @@ class GovernanceConstantTest { Assertions.assertEquals("version", taskresourceversionstr) Assertions.assertEquals("fileName", taskresourcefilenamestr) Assertions.assertTrue(500 == requestenginestatusbatchlimit.intValue()) - Assertions.assertEquals("resultsetIndex", resultsetindex) } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala index fade444fa0c..4b5b1fab9e1 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala @@ -77,7 +77,7 @@ class UnixProcessEngineCommandBuilder extends ShellProcessEngineCommandBuilder { newLine("linkis_engineconn_errorcode=$?") newLine("if [ $linkis_engineconn_errorcode -ne 0 ]") newLine("then") - newLine(" tail -1000 ${LOG_DIRS}/stderr") + newLine(" timeout 10 tail -1000 ${LOG_DIRS}/stderr") newLine(" exit $linkis_engineconn_errorcode") newLine("fi") } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala index 91aa93e5fcf..d6ba9a030a4 100644 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala @@ -35,6 +35,9 @@ import org.apache.linkis.manager.engineplugin.common.launch.process.{ } import org.apache.linkis.manager.engineplugin.common.launch.process.Environment._ import org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants._ +import org.apache.linkis.manager.label.conf.LabelCommonConfig +import org.apache.linkis.manager.label.entity.engine.EngineType +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.commons.io.FileUtils import org.apache.commons.lang3.StringUtils @@ -166,8 +169,15 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { .findAvailPortByRange(GovernanceCommonConf.ENGINE_CONN_PORT_RANGE.getValue) .toString - var springConf = Map("server.port" -> engineConnPort, "spring.profiles.active" -> "engineconn") - + var springConf = + Map[String, String]("server.port" -> engineConnPort, "spring.profiles.active" -> "engineconn") + val properties = + PortUtils.readFromProperties(Configuration.getLinkisHome + "/conf/version.properties") + if (StringUtils.isNotBlank(properties.getProperty("version"))) { + springConf += ("eureka.instance.metadata-map.linkis.app.version" -> properties.getProperty( + "version" + )) + } request.creationDesc.properties.asScala.filter(_._1.startsWith("spring.")).foreach { case (k, v) => springConf = springConf + (k -> v) @@ -216,7 +226,31 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { ) ) - val engineConnEnvKeys = request.environment.remove(ENGINECONN_ENVKEYS.toString) + var engineConnEnvKeys = request.environment.remove(ENGINECONN_ENVKEYS.toString) + // 处理spark环境问题,兼容spark切换spark-cmd后spark2能正常使用 + val 
engineTypeLabel = LabelUtil.getEngineTypeLabel(request.labels) + if (engineTypeLabel.getEngineType.equals(EngineType.SPARK.toString)) { + val (sparkHome, sparkCmd, sparkEnginePath, sparkConfig) = + if (engineTypeLabel.getVersion.contains(LabelCommonConfig.SPARK3_ENGINE_VERSION.getValue)) { + ( + LabelCommonConfig.SPARK3_ENGINE_HOME.getValue, + LabelCommonConfig.SPARK3_ENGINE_CMD.getValue, + LabelCommonConfig.SPARK3_ENGINE_PATH.getValue, + LabelCommonConfig.SPARK3_ENGINE_CONFIG.getValue + ) + } else { + ( + LabelCommonConfig.SPARK_ENGINE_HOME.getValue, + LabelCommonConfig.SPARK_ENGINE_CMD.getValue, + LabelCommonConfig.SPARK_ENGINE_PATH.getValue, + LabelCommonConfig.SPARK_ENGINE_CONFIG.getValue + ) + } + processBuilder.setEnv(LabelCommonConfig.SPARK_ENGINE_HOME_CONF, sparkHome) + processBuilder.setEnv(LabelCommonConfig.SPARK_ENGINE_CMD_CONF, sparkCmd) + processBuilder.setEnv(LabelCommonConfig.SPARK_ENGINE_PATH_CONF, sparkEnginePath) + processBuilder.setEnv(LabelCommonConfig.SPARK_ENGINE_CONF_DIR, sparkConfig) + } logger.debug(s"ENGINECONN_ENVKEYS: " + engineConnEnvKeys) // set other env val engineConnEnvKeyArray = engineConnEnvKeys.split(",") @@ -255,14 +289,15 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { } } + /** + * process exit code if process is null retur errorcode 10 + * @return + */ def processWaitFor: Int = { if (process != null) { process.waitFor } else { - throw new ECMCoreException( - CAN_NOT_GET_TERMINATED.getErrorCode, - CAN_NOT_GET_TERMINATED.getErrorDesc - ) + 10 } } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala index 160025ed51d..395c9258b86 100644 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala @@ -34,9 +34,9 @@ trait NodeHealthReport { def getNodeMsg: String - def getUsedResource: Resource +// def getUsedResource: Resource - def setUsedResource(resource: Resource): Unit +// def setUsedResource(resource: Resource): Unit def getTotalResource: Resource diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala index 4612467193e..85520204936 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala @@ -17,15 +17,16 @@ package org.apache.linkis.ecm.core.utils -import org.apache.linkis.common.utils.Utils +import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils -import java.io.IOException +import java.io.{BufferedReader, FileReader, IOException} import java.net.ServerSocket +import java.util.Properties -object PortUtils { +object PortUtils extends Logging { /** * portRange: '-' is the separator @@ -62,4 +63,23 @@ object PortUtils { Utils.tryFinally(socket.getLocalPort)(IOUtils.closeQuietly(socket)) } + def readFromProperties(propertiesFile: String): Properties = { + val properties: Properties = new Properties + var reader: BufferedReader = null; + try 
{ + reader = new BufferedReader(new FileReader(propertiesFile)) + properties.load(reader) + } catch { + case e: Exception => + logger.warn(s"loading vsersion faild with path $propertiesFile error:$e") + } finally { + try if (reader != null) reader.close + catch { + case e: Exception => + logger.warn(s"try to close buffered reader with error:${e.getMessage}") + } + } + properties + } + } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml index 9f46630b667..41022d30da8 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml @@ -30,7 +30,7 @@ org.apache.linkis - linkis-udf-client + linkis-pes-rpc-client ${project.version} @@ -55,7 +55,7 @@ org.apache.linkis - linkis-bml-client + linkis-pes-client ${project.version} @@ -73,13 +73,6 @@ provided - - org.json4s - json4s-jackson_${scala.binary.version} - ${json4s.version} - provided - - diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml index da4e71662bb..f24c7e21a16 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml @@ -244,9 +244,6 @@ org.slf4j:jul-to-slf4j:jar org.slf4j:slf4j-api:jar org.springframework.boot:spring-boot:jar - org.springframework.boot:spring-boot-actuator:jar - org.springframework.boot:spring-boot-actuator-autoconfigure:jar - 
org.springframework.boot:spring-boot-autoconfigure:jar org.springframework.boot:spring-boot-starter:jar org.springframework.boot:spring-boot-starter-actuator:jar org.springframework.boot:spring-boot-starter-aop:jar diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java index ca4412824dd..0c745ef64dd 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java @@ -33,7 +33,15 @@ public enum EngineconnServerErrorCodeSummary implements LinkisErrorCode { 11110, "the parameters of engineConnInstance and ticketId are both not exists.(engineConnInstance 和ticketId 的参数都不存在.)"), LOG_IS_NOT_EXISTS(11110, "Log directory {0} does not exists.(日志目录 {0} 不存在.)"), - FAILED_TO_DOWNLOAD(911115, "failed to downLoad(下载失败)"); + FAILED_TO_DOWNLOAD(911115, "failed to downLoad(下载失败)"), + FILE_IS_OVERSIZE(911116, "Download file has exceeded 100MB(下载文件已超过100M)"), + PARAMETER_NOT_NULL(911117, "Parameter {0} cannot be empty (参数 {0} 不能为空)"), + LOGTYPE_ERROR( + 911118, + "logType only supports stdout, stderr, gc, yarnApp(logType仅支持stdout,stderr,gc,yarnApp)"), + NOT_PERMISSION( + 911119, "You {0} have no permission to download Log in ECM {1}(用户 {0} 无权限下载 ECM {1} 日志)"), + ; /** (errorCode)错误码 */ private final int errorCode; diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java 
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java new file mode 100644 index 00000000000..2de878a24c1 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java @@ -0,0 +1,173 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ecm.restful; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.Consts; + +import org.springframework.web.bind.annotation.*; + +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.nio.file.attribute.FileOwnerAttributeView; +import java.nio.file.attribute.UserPrincipal; +import java.text.MessageFormat; + +import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary.*; + +@Api(tags = "ECM") +@RequestMapping(path = "/engineconnManager") +@RestController +public class ECMRestfulApi { + + private final Logger logger = LoggerFactory.getLogger(ECMRestfulApi.class); + + /** + * * Reason for using the get method: Added gateway forwarding rules, which only support get + * requests + * + * @param req + * @param response + * @param emInstance + * @param instance + * @param logDirSuffix + * @param logType + * @throws IOException + */ + @ApiOperation( + value = "downloadEngineLog", + notes = "download engine log", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "emInstance", + required = true, + dataType = "String", + example = "xxx0002:9102"), + @ApiImplicitParam( + name = "instance", + required = true, + dataType = "String", + example = "xxx0002:35873"), + @ApiImplicitParam(name = "logDirSuffix", required = 
true, dataType = "String"), + @ApiImplicitParam(name = "logType", required = true, dataType = "String") + }) + @ApiOperationSupport(ignoreParameters = {"json"}) + @RequestMapping(path = "/downloadEngineLog", method = RequestMethod.GET) + public Message downloadEngineLog( + HttpServletRequest req, + HttpServletResponse response, + @RequestParam(value = "emInstance") String emInstance, + @RequestParam(value = "instance") String instance, + @RequestParam(value = "logDirSuffix") String logDirSuffix, + @RequestParam(value = "logType") String logType) + throws IOException { + String userName = ModuleUserUtils.getOperationUser(req, "downloadEngineLog"); + if (StringUtils.isBlank(instance)) { + return Message.error(MessageFormat.format(PARAMETER_NOT_NULL.getErrorDesc(), "instance")); + } + if (StringUtils.isBlank(logDirSuffix)) { + return Message.error(MessageFormat.format(PARAMETER_NOT_NULL.getErrorDesc(), "logDirSuffix")); + } + if (StringUtils.isBlank(logType)) { + return Message.error(MessageFormat.format(PARAMETER_NOT_NULL.getErrorDesc(), "logType")); + } else if (!logType.equals("stdout") + && !logType.equals("stderr") + && !logType.equals("gc") + && !logType.equals("yarnApp")) { + return Message.error(LOGTYPE_ERROR.getErrorDesc()); + } + // 获取文件的权限归属者 + FileOwnerAttributeView ownerView = + Files.getFileAttributeView( + Paths.get(logDirSuffix + "/" + logType), FileOwnerAttributeView.class); + UserPrincipal owner = ownerView.getOwner(); + if (!owner.getName().equals(userName) + && Configuration.isNotAdmin(userName) + && Configuration.isNotJobHistoryAdmin(userName)) { + return Message.error( + MessageFormat.format(NOT_PERMISSION.getErrorDesc(), userName, emInstance)); + } + File inputFile = new File(logDirSuffix, logType); + if (!inputFile.exists()) { + return Message.error(MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc(), logDirSuffix)); + } else { + long fileSizeInBytes = inputFile.length(); + long fileSizeInMegabytes = fileSizeInBytes / (1024 * 1024); + if 
(fileSizeInMegabytes > 100) { + return Message.error(MessageFormat.format(FILE_IS_OVERSIZE.getErrorDesc(), logDirSuffix)); + } + ServletOutputStream outputStream = null; + FileInputStream inputStream = null; + BufferedInputStream fis = null; + PrintWriter writer = null; + try { + inputStream = new FileInputStream(inputFile); + fis = new BufferedInputStream(inputStream); + byte[] buffer = new byte[1024]; + int bytesRead = 0; + response.setCharacterEncoding(Consts.UTF_8.toString()); + java.nio.file.Path source = Paths.get(inputFile.getPath()); + response.addHeader("Content-Type", Files.probeContentType(source)); + // filename eg:xxx002_11529_stdout.txt + response.addHeader( + "Content-Disposition", + "attachment;filename=" + instance.replace(":", "_") + "_" + logType + ".txt"); + response.addHeader("Content-Length", fileSizeInBytes + ""); + outputStream = response.getOutputStream(); + while ((bytesRead = fis.read(buffer, 0, 1024)) != -1) { + outputStream.write(buffer, 0, bytesRead); + } + } catch (IOException e) { + logger.error("Download EngineLog Failed Msg :", e); + response.reset(); + response.setCharacterEncoding(Consts.UTF_8.toString()); + response.setContentType("text/plain; charset=utf-8"); + writer = response.getWriter(); + writer.append("error(错误):" + e.getMessage()); + writer.flush(); + } finally { + if (outputStream != null) { + outputStream.flush(); + } + IOUtils.closeQuietly(outputStream); + IOUtils.closeQuietly(fis); + IOUtils.closeQuietly(inputStream); + } + return Message.ok(); + } + } +} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java index 111ad896e04..409f0d7e982 100644 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java @@ -46,7 +46,7 @@ public void run() { cmdlist.add("sh"); cmdlist.add(shellPath + "linkis-ec-clear.sh"); try { - Utils.exec(cmdlist.toArray(new String[0]), 3000L); + Utils.exec(cmdlist.toArray(new String[0]), 1800000L); } catch (Exception e) { logger.warn("Shell linkis-ec-clear.sh execution failed, msg:" + e.getMessage()); } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java new file mode 100644 index 00000000000..2e351b00df4 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java @@ -0,0 +1,296 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ecm.server.service.impl; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.ecm.server.conf.ECMConfiguration; +import org.apache.linkis.ecm.server.service.EngineConnKillService; +import org.apache.linkis.ecm.utils.ECMCacheUtils; +import org.apache.linkis.engineconn.common.conf.EngineConnConf; +import org.apache.linkis.governance.common.utils.GovernanceUtils; +import org.apache.linkis.manager.common.constant.AMConstant; +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; +import org.apache.linkis.manager.common.protocol.engine.EngineStopResponse; +import org.apache.linkis.manager.common.protocol.engine.EngineSuicideRequest; +import org.apache.linkis.rpc.Sender; +import org.apache.linkis.rpc.message.annotation.Receiver; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DefaultEngineConnKillService implements EngineConnKillService { + + private static final Logger logger = LoggerFactory.getLogger(DefaultEngineConnKillService.class); + + private static final ThreadPoolExecutor ecYarnAppKillService = + Utils.newCachedThreadPool(10, "ECM-Kill-EC-Yarn-App", true); + + @Override + @Receiver + public EngineStopResponse dealEngineConnStop(EngineStopRequest engineStopRequest) { + logger.info("received EngineStopRequest " + engineStopRequest); + String pid = null; + if 
(AMConstant.PROCESS_MARK.equals(engineStopRequest.getIdentifierType()) + && StringUtils.isNotBlank(engineStopRequest.getIdentifier())) { + ECMCacheUtils.putStopECToCache(engineStopRequest.getServiceInstance(), engineStopRequest); + pid = engineStopRequest.getIdentifier(); + } + logger.info("dealEngineConnStop return pid: {}", pid); + EngineStopResponse response = new EngineStopResponse(); + if (StringUtils.isNotBlank(pid)) { + if (!killEngineConnByPid(pid, engineStopRequest.getServiceInstance())) { + response.setStopStatus(false); + response.setMsg( + "Kill engine " + engineStopRequest.getServiceInstance().toString() + " failed."); + } else { + response.setStopStatus(true); + response.setMsg( + "Kill engine " + engineStopRequest.getServiceInstance().toString() + " succeed."); + } + } else { + String processPort = engineStopRequest.getServiceInstance().getInstance().split(":")[1]; + logger.warn("Kill EC {} by port {}", engineStopRequest.getServiceInstance(), processPort); + if (!killEngineConnByPort(processPort, engineStopRequest.getServiceInstance())) { + response.setStopStatus(false); + response.setMsg( + "Kill engine " + engineStopRequest.getServiceInstance().toString() + " failed."); + } else { + response.setStopStatus(true); + response.setMsg( + "Kill engine " + engineStopRequest.getServiceInstance().toString() + " succeed."); + } + } + + // Requires default kill yarn appid + if (AMConstant.PROCESS_MARK.equals(engineStopRequest.getIdentifierType())) { + killYarnAppIdOfOneEc(engineStopRequest); + } + + if (AMConstant.CLUSTER_PROCESS_MARK.equals(engineStopRequest.getIdentifierType()) + && engineStopRequest.getIdentifier() != null) { + List appIds = new ArrayList<>(); + appIds.add(engineStopRequest.getIdentifier()); + GovernanceUtils.killYarnJobApp(appIds); + } + + if (!response.getStopStatus()) { + EngineSuicideRequest request = + new EngineSuicideRequest( + engineStopRequest.getServiceInstance(), engineStopRequest.getUser()); + try { + 
Sender.getSender(engineStopRequest.getServiceInstance()).send(request); + response.setStopStatus(true); + response.setMsg(response.getMsg() + " Now send suicide request to engine."); + } catch (Exception e) { + response.setMsg( + response.getMsg() + " Sended suicide request to engine error, " + e.getMessage()); + } + } + return response; + } + + public void killYarnAppIdOfOneEc(EngineStopRequest engineStopRequest) { + String logDirSuffix = engineStopRequest.getLogDirSuffix(); + ServiceInstance serviceInstance = engineStopRequest.getServiceInstance(); + String engineType = engineStopRequest.getEngineType(); + String engineConnInstance = serviceInstance.toString(); + String engineLogDir; + if (logDirSuffix.startsWith(ECMConfiguration.ENGINECONN_ROOT_DIR())) { + engineLogDir = logDirSuffix; + } else { + engineLogDir = ECMConfiguration.ENGINECONN_ROOT_DIR() + File.separator + logDirSuffix; + } + logger.info( + "try to kill yarn app ids in the engine of: [{}] engineLogDir: [{}]", + engineConnInstance, + engineLogDir); + final String errEngineLogPath = engineLogDir.concat(File.separator).concat("yarnApp"); + logger.info( + "try to parse the yarn app id from the engine err log file path: [{}]", errEngineLogPath); + File file = new File(errEngineLogPath); + if (file.exists()) { + ecYarnAppKillService.execute( + () -> { + BufferedReader in = null; + try { + in = new BufferedReader(new FileReader(errEngineLogPath)); + String line; + String regex = getYarnAppRegexByEngineType(engineType); + if (StringUtils.isBlank(regex)) { + return; + } + Pattern pattern = Pattern.compile(regex); + List appIds = new ArrayList<>(); + while ((line = in.readLine()) != null) { + if (StringUtils.isNotBlank(line)) { + Matcher mApp = pattern.matcher(line); + if (mApp.find()) { + String candidate1 = mApp.group(mApp.groupCount()); + if (!appIds.contains(candidate1)) { + appIds.add(candidate1); + } + } + } + } + GovernanceUtils.killYarnJobApp(appIds); + logger.info("finished kill yarn app ids in the 
engine of ({}).", engineConnInstance); + } catch (IOException ioEx) { + if (ioEx instanceof FileNotFoundException) { + logger.error("the engine log file {} not found.", errEngineLogPath); + } else { + logger.error( + "the engine log file parse failed. the reason is {}", ioEx.getMessage()); + } + } finally { + IOUtils.closeQuietly(in); + } + }); + } + } + + private String getYarnAppRegexByEngineType(String engineType) { + if (StringUtils.isBlank(engineType)) { + return ""; + } + String regex; + switch (engineType) { + case "spark": + case "shell": + regex = EngineConnConf.SPARK_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); + break; + case "sqoop": + regex = EngineConnConf.SQOOP_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); + break; + case "seatunnel": + case "flink": + case "hive": + regex = EngineConnConf.HIVE_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); + break; + default: + regex = ""; + } + return regex; + } + + private boolean killEngineConnByPid(String processId, ServiceInstance serviceInstance) { + logger.info("try to kill {} toString with pid({}).", serviceInstance.toString(), processId); + if (StringUtils.isNotBlank(processId)) { + if (ECMConfiguration.ECM_PROCESS_SCRIPT_KILL()) { + GovernanceUtils.killProcess(processId, serviceInstance.toString(), true); + } else { + killProcessByKillCmd(processId, serviceInstance.toString()); + } + return !isProcessAlive(processId); + } else { + logger.warn("cannot kill {} with empty pid.", serviceInstance); + return false; + } + } + + private boolean killEngineConnByPort(String port, ServiceInstance serviceInstance) { + logger.info("try to kill {} toString with port({}).", serviceInstance.toString(), port); + if (StringUtils.isNotBlank(port)) { + GovernanceUtils.killECProcessByPort(port, serviceInstance.toString(), true); + return !isProcessAliveByPort(port); + } else { + logger.warn("cannot kill {} with empty port.", serviceInstance); + return false; + } + } + + private boolean isProcessAlive(String pid) { 
+ String findCmd = + "ps -ef | grep " + + pid + + " | grep EngineConnServer | awk '{print \"exists_\"$2}' | grep " + + pid + + "|| true"; + List cmdList = new ArrayList<>(); + cmdList.add("bash"); + cmdList.add("-c"); + cmdList.add(findCmd); + try { + String rs = Utils.exec(cmdList.toArray(new String[0]), 5000L); + return null != rs && rs.contains("exists_" + pid); + } catch (Exception e) { + logger.warn("Method isProcessAlive failed", e); + return false; + } + } + + private boolean isProcessAliveByPort(String port) { + String findCmd = + "ps -ef | grep server.port= " + + port + + " | grep EngineConnServer | awk -F \"server.port=\" '{print \"exists_\"$2}'"; + List cmdList = new ArrayList<>(); + cmdList.add("bash"); + cmdList.add("-c"); + cmdList.add(findCmd); + try { + String rs = Utils.exec(cmdList.toArray(new String[0]), 5000L); + return null != rs && rs.contains("exists_" + port); + } catch (Exception e) { + logger.warn("Method isProcessAlive failed", e); + return false; + } + } + + private void killProcessByKillCmd(String pid, String desc) { + String k15cmd = "sudo kill " + pid; + String k9cmd = "sudo kill -9 " + pid; + int tryNum = 0; + try { + while (isProcessAlive(pid) && tryNum <= 3) { + logger.info( + "{} still alive with pid({}), use shell command to kill it. try {}++", + desc, + pid, + tryNum++); + if (tryNum <= 3) { + Utils.exec(k15cmd.split(" "), 3000L); + } else { + logger.info( + "{} still alive with pid({}). try {}, use shell command to kill -9 it", + desc, + pid, + tryNum); + Utils.exec(k9cmd.split(" "), 3000L); + } + Thread.sleep(5000); + } + } catch (InterruptedException e) { + logger.error("Interrupted while killing engine {} with pid({})." 
+ desc, pid); + } + } +} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java new file mode 100644 index 00000000000..52d140055df --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ecm.utils; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; + +import java.util.concurrent.TimeUnit; + +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; + +public class ECMCacheUtils { + private static Cache ecStopRequestCache = + CacheBuilder.newBuilder().expireAfterWrite(10, TimeUnit.SECONDS).build(); + + public static void putStopECToCache( + ServiceInstance serviceInstance, EngineStopRequest engineStopRequest) { + ecStopRequestCache.put(serviceInstance, engineStopRequest); + } + + public static EngineStopRequest getStopEC(ServiceInstance serviceInstance) { + return ecStopRequestCache.getIfPresent(serviceInstance); + } +} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala index 0c48d730a37..dbd24e35625 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala @@ -57,10 +57,9 @@ object ECMConfiguration { val ECM_MAX_CREATE_INSTANCES: Int = CommonVars[Integer]("wds.linkis.ecm.engineconn.instances.max", 50).getValue - val ECM_PROTECTED_MEMORY: Long = CommonVars[Long]( - "wds.linkis.ecm.protected.memory", - ByteTimeUtils.byteStringAsBytes("4g") - ).getValue + val ECM_PROTECTED_MEMORY: Long = ByteTimeUtils.byteStringAsBytes( + CommonVars[String]("wds.linkis.ecm.protected.memory", "10g").getValue + ) val ECM_PROTECTED_CPU_LOAD: Double = 
CommonVars[Double]("wds.linkis.ecm.protected.cpu.load", 0.98d).getValue @@ -80,7 +79,7 @@ object ECMConfiguration { GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue val ECM_HEALTH_REPORT_PERIOD: Long = - CommonVars("wds.linkis.ecm.health.report.period", 30).getValue + CommonVars("wds.linkis.ecm.health.report.period", 10).getValue val ECM_HEALTH_REPORT_DELAY: Long = CommonVars("wds.linkis.ecm.health.report.delay", 10).getValue @@ -116,4 +115,8 @@ object ECMConfiguration { val ECM_PROCESS_SCRIPT_KILL: Boolean = CommonVars[Boolean]("wds.linkis.ecm.script.kill.engineconn", true).getValue + val EC_CAN_RETRY_EXIT_CODES: Array[Int] = + CommonVars[String]("linkis.ecm.can.retry.exit.codes", "143").getValue + .split(",") + .map(_.toInt); } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala index 6bcb6c3b8b9..db4ccea7f9f 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala @@ -19,22 +19,11 @@ package org.apache.linkis.ecm.server.listener import org.apache.linkis.ecm.core.engineconn.EngineConn import org.apache.linkis.ecm.core.listener.ECMEvent -import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.protocol.callback.{YarnAPPIdCallbackProtocol, YarnInfoCallbackProtocol} +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest case class ECMReadyEvent(params: 
Array[String]) extends ECMEvent case class ECMClosedEvent() extends ECMEvent -case class EngineConnStatusChageEvent(from: NodeStatus, to: NodeStatus) extends ECMEvent - -case class YarnAppIdCallbackEvent(protocol: YarnAPPIdCallbackProtocol) extends ECMEvent - -case class YarnInfoCallbackEvent(protocol: YarnInfoCallbackProtocol) extends ECMEvent - -case class EngineConnPidCallbackEvent(protocol: ResponseEngineConnPid) extends ECMEvent - -case class EngineConnAddEvent(conn: EngineConn) extends ECMEvent - -case class EngineConnStatusChangeEvent(tickedId: String, updateStatus: NodeStatus) extends ECMEvent +case class EngineConnStopEvent(engineConn: EngineConn, engineStopRequest: EngineStopRequest) + extends ECMEvent diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala index bc856ba6814..66327dadcf0 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala @@ -17,15 +17,13 @@ package org.apache.linkis.ecm.server.operator -import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.conf.CommonVars import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._ import org.apache.linkis.ecm.server.conf.ECMConfiguration import org.apache.linkis.ecm.server.exception.ECMErrorException -import org.apache.linkis.ecm.server.service.{EngineConnListService, LocalDirsHandleService} +import 
org.apache.linkis.ecm.server.service.LocalDirsHandleService import org.apache.linkis.manager.common.operator.Operator -import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest import org.apache.commons.io.IOUtils import org.apache.commons.io.input.ReversedLinesFileReader @@ -37,14 +35,10 @@ import java.text.MessageFormat import java.util import java.util.Collections -import scala.collection.JavaConverters.asScalaBufferConverter import scala.util.matching.Regex class EngineConnLogOperator extends Operator with Logging { - private var engineConnListService: EngineConnListService = _ - private var localDirsHandleService: LocalDirsHandleService = _ - override def getNames: Array[String] = Array(EngineConnLogOperator.OPERATOR_NAME) override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { @@ -95,10 +89,7 @@ class EngineConnLogOperator extends Operator with Logging { var readLine, skippedLine, lineNum = 0 var rowIgnore = false var ignoreLine = 0 - val linePattern = Option(EngineConnLogOperator.MULTILINE_PATTERN.getValue) match { - case Some(pattern) => pattern.r - case _ => null - } + val linePattern = getLinePattern val maxMultiline = EngineConnLogOperator.MULTILINE_MAX.getValue Utils.tryFinally { var line = randomAndReversedReadLine() @@ -155,59 +146,20 @@ class EngineConnLogOperator extends Operator with Logging { logPath } + protected def getLinePattern: Regex = { + Option(EngineConnLogOperator.MULTILINE_PATTERN.getValue) match { + case Some(pattern) => pattern.r + case _ => null + } + } + protected def getEngineConnInfo(implicit parameters: Map[String, Any] ): (String, String, String) = { - if (engineConnListService == null) { - engineConnListService = - DataWorkCloudApplication.getApplicationContext.getBean(classOf[EngineConnListService]) - localDirsHandleService = - DataWorkCloudApplication.getApplicationContext.getBean(classOf[LocalDirsHandleService]) - } val logDIrSuffix = getAs("logDirSuffix", "") - val (engineConnLogDir, 
engineConnInstance, ticketId) = - if (StringUtils.isNotBlank(logDIrSuffix)) { - val ecLogPath = ECMConfiguration.ENGINECONN_ROOT_DIR + File.separator + logDIrSuffix - val ticketId = getAs("ticketId", "") - (ecLogPath, "", ticketId) - } else { - val engineConnInstance = getAs( - ECMOperateRequest.ENGINE_CONN_INSTANCE_KEY, - getAs[String]("engineConnInstance", null) - ) - Option(engineConnInstance) - .flatMap { instance => - engineConnListService.getEngineConns.asScala.find( - _.getServiceInstance.getInstance == instance - ) - } - .map(engineConn => - ( - engineConn.getEngineConnManagerEnv.engineConnLogDirs, - engineConnInstance, - engineConn.getTickedId - ) - ) - .getOrElse { - val ticketId = getAs("ticketId", "") - if (StringUtils.isBlank(ticketId)) { - throw new ECMErrorException( - BOTH_NOT_EXISTS.getErrorCode, - s"the parameters of ${ECMOperateRequest.ENGINE_CONN_INSTANCE_KEY}, engineConnInstance and ticketId are both not exists." - ) - } - val logDir = engineConnListService - .getEngineConn(ticketId) - .map(_.getEngineConnManagerEnv.engineConnLogDirs) - .getOrElse { - val creator = getAsThrow[String]("creator") - val engineConnType = getAsThrow[String]("engineConnType") - localDirsHandleService.getEngineConnLogDir(creator, ticketId, engineConnType) - } - (logDir, engineConnInstance, ticketId) - } - } - (ticketId, engineConnInstance, engineConnLogDir) + val ecLogPath = ECMConfiguration.ENGINECONN_ROOT_DIR + File.separator + logDIrSuffix + val ticketId = getAs("ticketId", "") + (ticketId, "", ecLogPath) } private def includeLine( diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala index e00d16b5199..36e7ddfc5f5 100644 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala @@ -17,20 +17,36 @@ package org.apache.linkis.ecm.server.operator -import org.apache.linkis.common.exception.LinkisCommonErrorException +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.io.FsPath import org.apache.linkis.common.utils.Utils -import org.apache.linkis.ecm.core.conf.ECMErrorCode import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._ import org.apache.linkis.ecm.server.exception.ECMErrorException +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.fs.FileSystem +import org.apache.linkis.storage.utils.StorageUtils + +import org.apache.commons.lang3.StringUtils import java.io.File import java.text.MessageFormat -import java.util.concurrent.TimeUnit +import java.util +import java.util.concurrent.{Callable, ConcurrentHashMap, ExecutorService, Future, TimeUnit} import scala.collection.JavaConverters._ +import scala.util.matching.Regex class EngineConnYarnLogOperator extends EngineConnLogOperator { + private implicit val fs: FileSystem = + FSFactory.getFs(StorageUtils.FILE).asInstanceOf[FileSystem] + + /** + * Yarn log fetchers + */ + private def yarnLogFetchers: ConcurrentHashMap[String, Future[String]] = + new ConcurrentHashMap[String, Future[String]]() + override def getNames: Array[String] = Array(EngineConnYarnLogOperator.OPERATOR_NAME) override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { @@ -38,75 +54,118 @@ class EngineConnYarnLogOperator extends EngineConnLogOperator { Utils.tryFinally { result = super.apply(parameters) result - } { - result.get("logPath") match { - case Some(path: String) => - val 
logFile = new File(path) - if (logFile.exists() && logFile.getName.startsWith(".")) { - // If is a temporary file, drop it - logger.info(s"Delete the temporary yarn log file: [$path]") - if (!logFile.delete()) { - logger.warn(s"Fail to delete the temporary yarn log file: [$path]") - } - } - } - } + } {} } override def getLogPath(implicit parameters: Map[String, Any]): File = { + val applicationId = getAsThrow[String]("yarnApplicationId") val (ticketId, engineConnInstance, engineConnLogDir) = getEngineConnInfo(parameters) - val rootLogDir = new File(engineConnLogDir) - if (!rootLogDir.exists() || !rootLogDir.isDirectory) { + val rootLogPath = EngineConnYarnLogOperator.YARN_LOG_STORAGE_PATH.getValue match { + case storePath if StringUtils.isNotBlank(storePath) => + val logPath = new FsPath(StorageUtils.FILE_SCHEMA + storePath + "/" + applicationId) + // Invoke to create directory + fs.mkdir(logPath) + // 777 permission + fs.setPermission(logPath, "rwxrwxrwx") + logPath + case _ => new FsPath(StorageUtils.FILE_SCHEMA + engineConnLogDir) + } + if (!fs.exists(rootLogPath) || !rootLogPath.toFile.isDirectory) { throw new ECMErrorException( LOG_IS_NOT_EXISTS.getErrorCode, - MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc, rootLogDir) + MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc, rootLogPath.getPath) ) } val creator = getAsThrow[String]("creator") - val applicationId = getAsThrow[String]("yarnApplicationId") - var logPath = new File(engineConnLogDir, "yarn_" + applicationId) - if (!logPath.exists()) { - val tempLogFile = - s".yarn_${applicationId}_${System.currentTimeMillis()}_${Thread.currentThread().getId}" - Utils.tryCatch { - var command = s"yarn logs -applicationId $applicationId >> $rootLogDir/$tempLogFile" - logger.info(s"Fetch yarn logs to temporary file: [$command]") - val processBuilder = new ProcessBuilder(sudoCommands(creator, command): _*) - processBuilder.environment.putAll(sys.env.asJava) - processBuilder.redirectErrorStream(false) - val process 
= processBuilder.start() - val waitFor = process.waitFor(5, TimeUnit.SECONDS) - logger.trace(s"waitFor: ${waitFor}, result: ${process.exitValue()}") - if (waitFor && process.waitFor() == 0) { - command = s"mv $rootLogDir/$tempLogFile $rootLogDir/yarn_$applicationId" - logger.info(s"Move and save yarn logs: [$command]") - Utils.exec(sudoCommands(creator, command)) - } else { - logPath = new File(engineConnLogDir, tempLogFile) - if (!logPath.exists()) { - throw new LinkisCommonErrorException( - -1, - s"Fetch yarn logs timeout, log aggregation has not completed or is not enabled" - ) - } + var logPath = new FsPath( + StorageUtils.FILE_SCHEMA + rootLogPath.getPath + "/yarn_" + applicationId + ) + if (!fs.exists(logPath)) { + val fetcher = yarnLogFetchers.computeIfAbsent( + applicationId, + new util.function.Function[String, Future[String]] { + override def apply(v1: String): Future[String] = + requestToFetchYarnLogs(creator, applicationId, rootLogPath.getPath) } - } { case e: Exception => - throw new LinkisCommonErrorException( - -1, - s"Fail to fetch yarn logs application: $applicationId, message: ${e.getMessage}" - ) + ) + // Just wait 5 seconds + Option(fetcher.get(5, TimeUnit.SECONDS)) match { + case Some(path) => logPath = new FsPath(StorageUtils.FILE_SCHEMA + path) + case _ => } + } - if (!logPath.exists() || !logPath.isFile) { + if (!fs.exists(logPath) || logPath.toFile.isDirectory) { throw new ECMErrorException( LOGFILE_IS_NOT_EXISTS.getErrorCode, - MessageFormat.format(LOGFILE_IS_NOT_EXISTS.getErrorDesc, logPath) + MessageFormat.format(LOGFILE_IS_NOT_EXISTS.getErrorDesc, logPath.getPath) ) } logger.info( s"Try to fetch EngineConn(id: $ticketId, instance: $engineConnInstance) yarn logs from ${logPath.getPath} in application id: $applicationId" ) - logPath + logPath.toFile + } + + /** + * Not support line pattern in yarn log + * @return + */ + override protected def getLinePattern: Regex = null + + /** + * Request the log fetcher + * + * @param creator + * 
creator + * @param applicationId + * application id + * @param logPath + * log path + * @return + */ + private def requestToFetchYarnLogs( + creator: String, + applicationId: String, + yarnLogDir: String + ): Future[String] = { + EngineConnYarnLogOperator.YARN_LOG_FETCH_SCHEDULER.submit(new Callable[String] { + override def call(): String = { + val logPath = new FsPath(StorageUtils.FILE_SCHEMA + yarnLogDir + "/yarn_" + applicationId) + if (!fs.exists(logPath)) { + val tempLogFile = + s".yarn_${applicationId}_${System.currentTimeMillis()}_${Thread.currentThread().getId}" + Utils.tryCatch { + var command = + s"yarn logs -applicationId $applicationId >> $yarnLogDir/$tempLogFile" + logger.info(s"Fetch yarn logs to temporary file: [$command]") + val processBuilder = new ProcessBuilder(sudoCommands(creator, command): _*) + processBuilder.environment.putAll(sys.env.asJava) + processBuilder.redirectErrorStream(false) + val process = processBuilder.start() + val exitCode = process.waitFor() + logger.trace(s"Finish to fetch yan logs to temporary file, result: ${exitCode}") + if (exitCode == 0) { + command = s"mv $yarnLogDir/$tempLogFile $yarnLogDir/yarn_$applicationId" + logger.info(s"Move and save yarn logs(${applicationId}): [$command]") + Utils.exec(sudoCommands(creator, command)) + } + } { e: Throwable => + logger.error( + s"Fail to fetch yarn logs application: $applicationId, message: ${e.getMessage}" + ) + } + val tmpFile = new File(yarnLogDir, tempLogFile) + if (tmpFile.exists()) { + logger.info(s"Delete temporary file: [${tempLogFile}] in yarn logs fetcher") + tmpFile.delete() + } + } + // Remove future + yarnLogFetchers.remove(applicationId) + if (fs.exists(logPath)) logPath.getPath else null + } + }) } private def sudoCommands(creator: String, command: String): Array[String] = { @@ -121,4 +180,15 @@ class EngineConnYarnLogOperator extends EngineConnLogOperator { object EngineConnYarnLogOperator { val OPERATOR_NAME = "engineConnYarnLog" + + // Specific the path to 
store the yarn logs + val YARN_LOG_STORAGE_PATH: CommonVars[String] = + CommonVars("linkis.engineconn.log.yarn.storage-path", "") + + val YARN_LOG_FETCH_THREAD: CommonVars[Int] = + CommonVars("linkis.engineconn.log.yarn.fetch.thread-num", 5) + + val YARN_LOG_FETCH_SCHEDULER: ExecutorService = + Utils.newFixedThreadPool(YARN_LOG_FETCH_THREAD.getValue + 1, "yarn_logs_fetch", false) + } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala index 6fdf8b3488f..150d0be6ba4 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala @@ -69,9 +69,9 @@ class DefaultECMHealthReport extends ECMHealthReport { override def getNodeMsg: String = nodeMsg - override def getUsedResource: Resource = usedResource +// override def getUsedResource: Resource = usedResource - override def setUsedResource(usedResource: Resource): Unit = this.usedResource = usedResource +// override def setUsedResource(usedResource: Resource): Unit = this.usedResource = usedResource override def getTotalResource: Resource = totalResource diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnListService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnListService.scala deleted file mode 100644 index 
d36d35ccebe..00000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnListService.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service - -import org.apache.linkis.ecm.core.engineconn.EngineConn -import org.apache.linkis.ecm.core.launch.EngineConnLaunchRunner -import org.apache.linkis.manager.common.entity.resource.Resource - -import java.util - -/** - * The enginelistservice interface manages the interface started by the engine The most important - * submit method is to put the thread that starts the engine into the thread pool to start - * EngineListService接口管理引擎启动的接口 最重要的submit方法是将启动引擎的线程放入到线程池中进行启动 - */ -trait EngineConnListService { - - def init(): Unit - - def getEngineConn(engineConnId: String): Option[EngineConn] - - def getEngineConns: util.List[EngineConn] - - def addEngineConn(engineConn: EngineConn): Unit - - def killEngineConn(engineConnId: String): Unit - - def getUsedResources: Resource - - def submit(runner: EngineConnLaunchRunner): Option[EngineConn] - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnPidCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnPidCallbackService.scala deleted file mode 100644 index 8e833947387..00000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnPidCallbackService.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ecm.server.service - -import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid - -trait EngineConnPidCallbackService { - - def dealPid(protocol: ResponseEngineConnPid): Unit - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnStatusCallbackService.scala deleted file mode 100644 index 12ccc088be7..00000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnStatusCallbackService.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ecm.server.service - -import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback - -trait EngineConnStatusCallbackService { - - def dealEngineConnStatusCallback(protocol: EngineConnStatusCallback): Unit - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/LogCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/LogCallbackService.scala deleted file mode 100644 index 873b0981944..00000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/LogCallbackService.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ecm.server.service - -import org.apache.linkis.protocol.callback.LogCallbackProtocol - -trait LogCallbackService { - - def dealLog(protocol: LogCallbackProtocol): Unit - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/YarnCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/YarnCallbackService.scala deleted file mode 100644 index 06a9c787c55..00000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/YarnCallbackService.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service - -import org.apache.linkis.protocol.callback.{YarnAPPIdCallbackProtocol, YarnInfoCallbackProtocol} - -trait YarnCallbackService { - - def dealApplicationId(protocol: YarnAPPIdCallbackProtocol): Unit - - def dealApplicationURI(protocol: YarnInfoCallbackProtocol): Unit - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala index 6ac10d1e14a..df00ed4960e 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala @@ -25,22 +25,26 @@ import org.apache.linkis.ecm.server.LinkisECMApplication import org.apache.linkis.ecm.server.conf.ECMConfiguration._ import org.apache.linkis.ecm.server.engineConn.DefaultEngineConn import org.apache.linkis.ecm.server.hook.ECMHook -import org.apache.linkis.ecm.server.listener.{EngineConnAddEvent, EngineConnStatusChangeEvent} +import org.apache.linkis.ecm.server.listener.EngineConnStopEvent import org.apache.linkis.ecm.server.service.{EngineConnLaunchService, ResourceLocalizationService} import org.apache.linkis.ecm.server.util.ECMUtils import org.apache.linkis.governance.common.conf.GovernanceCommonConf -import org.apache.linkis.governance.common.utils.JobUtils +import org.apache.linkis.governance.common.utils.{ECPathUtils, JobUtils, LoggerUtils} +import org.apache.linkis.manager.common.constant.AMConstant import 
org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus.Failed import org.apache.linkis.manager.common.entity.node.{AMEngineNode, EngineNode} -import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM +import org.apache.linkis.manager.common.protocol.engine.{ + EngineConnStatusCallbackToAM, + EngineStopRequest +} import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest +import org.apache.linkis.manager.label.constant.LabelValueConstant +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.rpc.Sender import org.apache.commons.lang3.exception.ExceptionUtils -import scala.concurrent.{ExecutionContextExecutorService, Future} -import scala.util.{Failure, Success} +import scala.concurrent.ExecutionContextExecutorService abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService with Logging { @@ -61,8 +65,9 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w } override def launchEngineConn(request: EngineConnLaunchRequest, duration: Long): EngineNode = { - // 1.创建engineConn和runner,launch 并设置基础属性 + // create engineConn/runner/launch val taskId = JobUtils.getJobIdFromStringMap(request.creationDesc.properties) + LoggerUtils.setJobIdMDC(taskId) logger.info("TaskId: {} try to launch a new EngineConn with {}.", taskId: Any, request: Any) val conn = createEngineConn val runner = createEngineConnLaunchRunner @@ -77,11 +82,9 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w conn.setStatus(NodeStatus.Starting) conn.setEngineConnInfo(new EngineConnInfo) conn.setEngineConnManagerEnv(launch.getEngineConnManagerEnv()) - // 2.资源本地化,并且设置ecm的env环境信息 + // get ec Resource getResourceLocalizationServie.handleInitEngineConnResources(request, conn) - // 3.添加到list - 
LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(EngineConnAddEvent(conn)) - // 4.run + // start ec Utils.tryCatch { beforeLaunch(request, conn, duration) runner.run() @@ -95,44 +98,12 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w case _ => } afterLaunch(request, conn, duration) - - val future = Future { - logger.info( - "TaskId: {} with request {} wait engineConn {} start", - Array(taskId, request, conn.getServiceInstance): _* - ) - waitEngineConnStart(request, conn, duration) - } - - future onComplete { - case Failure(t) => - logger.error( - "TaskId: {} init {} failed. {} with request {}", - Array( - taskId, - conn.getServiceInstance, - conn.getEngineConnLaunchRunner.getEngineConnLaunch - .getEngineConnManagerEnv() - .engineConnWorkDir, - request - ): _* - ) - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnStatusChangeEvent(conn.getTickedId, Failed) - ) - case Success(_) => - logger.info( - "TaskId: {} init {} succeed. {} with request {}", - Array( - taskId, - conn.getServiceInstance, - conn.getEngineConnLaunchRunner.getEngineConnLaunch - .getEngineConnManagerEnv() - .engineConnWorkDir, - request - ): _* - ) - } + logger.info( + "TaskId: {} with request {} wait engineConn {} start", + Array(taskId, request, conn.getServiceInstance): _* + ) + // start ec monitor thread + startEngineConnMonitorStart(request, conn) } { t => logger.error( "TaskId: {} init {} failed, {}, with request {} now stop and delete it. 
message: {}", @@ -147,31 +118,51 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w t ): _* ) - conn.getEngineConnLaunchRunner.stop() Sender .getSender(MANAGER_SERVICE_NAME) .send( EngineConnStatusCallbackToAM( conn.getServiceInstance, - NodeStatus.ShuttingDown, + NodeStatus.Failed, " wait init failed , reason " + ExceptionUtils.getRootCauseMessage(t) ) ) - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnStatusChangeEvent(conn.getTickedId, Failed) + conn.setStatus(NodeStatus.Failed) + val engineType = LabelUtil.getEngineType(request.labels) + val logPath = Utils.tryCatch(conn.getEngineConnManagerEnv.engineConnLogDirs) { t => + ECPathUtils.getECWOrkDirPathSuffix(request.user, request.ticketId, engineType) + } + val engineStopRequest = new EngineStopRequest + engineStopRequest.setEngineType(engineType) + engineStopRequest.setUser(request.user) + engineStopRequest.setIdentifier(conn.getPid) + engineStopRequest.setIdentifierType(AMConstant.PROCESS_MARK) + engineStopRequest.setLogDirSuffix(logPath) + engineStopRequest.setServiceInstance(conn.getServiceInstance) + LinkisECMApplication.getContext.getECMAsyncListenerBus.post( + EngineConnStopEvent(conn, engineStopRequest) ) + LoggerUtils.removeJobIdMDC() throw t } + LoggerUtils.removeJobIdMDC() + val label = LabelUtil.getEngingeConnRuntimeModeLabel(request.labels) + val isYarnClusterMode: Boolean = + if (null != label && label.getModeValue.equals(LabelValueConstant.YARN_CLUSTER_VALUE)) true + else false val engineNode = new AMEngineNode() engineNode.setLabels(conn.getLabels) - engineNode.setServiceInstance(conn.getServiceInstance) engineNode.setOwner(request.user) - engineNode.setMark("process") + if (isYarnClusterMode) { + engineNode.setMark(AMConstant.CLUSTER_PROCESS_MARK) + } else { + engineNode.setMark(AMConstant.PROCESS_MARK) + } engineNode } - def waitEngineConnStart(request: EngineConnLaunchRequest, conn: EngineConn, duration: Long): Unit + def 
startEngineConnMonitorStart(request: EngineConnLaunchRequest, conn: EngineConn): Unit def createEngineConn: EngineConn = new DefaultEngineConn diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala index 132749cbeca..be879f68777 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala @@ -25,7 +25,7 @@ import org.apache.linkis.ecm.server.conf.ECMConfiguration import org.apache.linkis.ecm.server.conf.ECMConfiguration._ import org.apache.linkis.ecm.server.listener.{ECMClosedEvent, ECMReadyEvent} import org.apache.linkis.ecm.server.report.DefaultECMHealthReport -import org.apache.linkis.ecm.server.service.{ECMHealthService, EngineConnListService} +import org.apache.linkis.ecm.server.service.ECMHealthService import org.apache.linkis.ecm.server.util.ECMUtils import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} import org.apache.linkis.manager.common.entity.metrics.{NodeHealthyInfo, NodeOverLoadInfo} @@ -38,8 +38,6 @@ import org.apache.linkis.manager.common.protocol.node.{ import org.apache.linkis.rpc.Sender import org.apache.linkis.rpc.message.annotation.Receiver -import org.springframework.beans.factory.annotation.Autowired - import java.util.Date import java.util.concurrent.TimeUnit @@ -79,9 +77,6 @@ class DefaultECMHealthService extends ECMHealthService with ECMEventListener { TimeUnit.SECONDS ) - @Autowired - private var 
engineConnListService: EngineConnListService = _ - override def getLastEMHealthReport: ECMHealthReport = { val report = new DefaultECMHealthReport report.setNodeId(LinkisECMApplication.getECMServiceInstance.toString) @@ -89,7 +84,6 @@ class DefaultECMHealthService extends ECMHealthService with ECMEventListener { // todo report right metrics report.setTotalResource(maxResource) report.setProtectedResource(minResource) - report.setUsedResource(engineConnListService.getUsedResources) report.setReportTime(new Date().getTime) report.setRunningEngineConns( LinkisECMApplication.getContext.getECMMetrics.getRunningEngineConns @@ -117,7 +111,6 @@ class DefaultECMHealthService extends ECMHealthService with ECMEventListener { // todo report latest engineconn metrics resource.setMaxResource(maxResource) resource.setMinResource(minResource) - resource.setUsedResource(report.getUsedResource) heartbeat.setNodeResource(resource) heartbeat.setHeartBeatMsg("") val nodeHealthyInfo = new NodeHealthyInfo diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala index eb9206c9634..4c7807dad15 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala @@ -37,7 +37,9 @@ import java.util.Collections class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener with Logging { - private implicit def readyEvent2RegisterECMRequest(event: ECMReadyEvent): 
RegisterEMRequest = { + private var unRegisterFlag = false + + private def readyEvent2RegisterECMRequest(event: ECMReadyEvent): RegisterEMRequest = { val request = new RegisterEMRequest val instance = Sender.getThisServiceInstance request.setUser(Utils.getJvmUser) @@ -50,14 +52,11 @@ class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener private def getLabelsFromArgs(params: Array[String]): util.Map[String, AnyRef] = { import scala.collection.JavaConverters._ - val labelRegex = """label\.(.+)\.(.+)=(.+)""".r val labels = new util.HashMap[String, AnyRef]() - // TODO: magic labels.asScala += LabelKeyConstant.SERVER_ALIAS_KEY -> Collections.singletonMap( "alias", ENGINE_CONN_MANAGER_SPRING_NAME ) - // TODO: group by key labels } @@ -81,12 +80,12 @@ class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener } override def onEvent(event: ECMEvent): Unit = event match { - case event: ECMReadyEvent => registerECM(event) - case event: ECMClosedEvent => unRegisterECM(event) + case event: ECMReadyEvent => registerECM(readyEvent2RegisterECMRequest(event)) + case event: ECMClosedEvent => unRegisterECM(closeEvent2StopECMRequest(event)) case _ => } - private implicit def closeEvent2StopECMRequest(event: ECMClosedEvent): StopEMRequest = { + private def closeEvent2StopECMRequest(event: ECMClosedEvent): StopEMRequest = { val request = new StopEMRequest val instance = Sender.getThisServiceInstance request.setUser(Utils.getJvmUser) @@ -114,7 +113,10 @@ class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener override def unRegisterECM(request: StopEMRequest): Unit = { logger.info("start unRegister ecm") - Sender.getSender(MANAGER_SERVICE_NAME).send(request) + if (!unRegisterFlag) { + Sender.getSender(MANAGER_SERVICE_NAME).send(request) + } + unRegisterFlag = true } } diff --git 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java deleted file mode 100644 index 4f593736d93..00000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service.impl; - -import org.apache.commons.io.IOUtils; -import org.apache.linkis.common.ServiceInstance; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.ecm.core.engineconn.EngineConn; -import org.apache.linkis.ecm.server.conf.ECMConfiguration; -import org.apache.linkis.ecm.server.service.EngineConnKillService; -import org.apache.linkis.ecm.server.service.EngineConnListService; -import org.apache.linkis.engineconn.common.conf.EngineConnConf; -import org.apache.linkis.governance.common.utils.GovernanceUtils; -import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; -import org.apache.linkis.manager.common.protocol.engine.EngineStopResponse; -import org.apache.linkis.manager.common.protocol.engine.EngineSuicideRequest; -import org.apache.linkis.manager.label.entity.Label; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.rpc.message.annotation.Receiver; -import org.apache.linkis.rpc.Sender; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class DefaultEngineConnKillService implements EngineConnKillService { - - private static final Logger logger = LoggerFactory.getLogger(DefaultEngineConnKillService.class); - - private EngineConnListService engineConnListService; - - public void setEngineConnListService(EngineConnListService engineConnListService) { - this.engineConnListService = engineConnListService; - } - - private static final ThreadPoolExecutor ecYarnAppKillService = Utils.newCachedThreadPool(10, "ECM-Kill-EC-Yarn-App", 
true); - - @Override - @Receiver - public EngineStopResponse dealEngineConnStop(EngineStopRequest engineStopRequest) { - logger.info("received EngineStopRequest " + engineStopRequest); - EngineConn engineConn = getEngineConnByServiceInstance(engineStopRequest.getServiceInstance()); - EngineStopResponse response = new EngineStopResponse(); - if (null != engineConn) { - if(!killEngineConnByPid(engineConn)) { - response.setStopStatus(false); - response.setMsg("Kill engine " + engineConn.getServiceInstance().toString() + " failed."); - } else { - response.setStopStatus(true); - response.setMsg("Kill engine " + engineConn.getServiceInstance().toString() + " succeed."); - } - killYarnAppIdOfOneEc(engineConn); - } else { - logger.warn("Cannot find engineconn : " + engineStopRequest.getServiceInstance().toString() + " in this engineConnManager engineConn list, cannot kill."); - response.setStopStatus(true); - response.setMsg("EngineConn " + engineStopRequest.getServiceInstance().toString() + " was not found in this engineConnManager."); - } - if (!response.getStopStatus()) { - EngineSuicideRequest request = new EngineSuicideRequest(engineStopRequest.getServiceInstance(), engineStopRequest.getUser()); - try { - Sender.getSender(engineStopRequest.getServiceInstance()).send(request); - response.setStopStatus(true); - response.setMsg(response.getMsg() + " Now send suicide request to engine."); - } catch (Exception e) { - response.setMsg(response.getMsg() + " Sended suicide request to engine error, " + e.getMessage()); - } - } - return response; - } - - public void killYarnAppIdOfOneEc(EngineConn engineConn) { - String engineConnInstance = engineConn.getServiceInstance().toString(); - logger.info("try to kill yarn app ids in the engine of ({}).", engineConnInstance); - String engineLogDir = engineConn.getEngineConnManagerEnv().engineConnLogDirs(); - final String errEngineLogPath = engineLogDir.concat(File.separator).concat("yarnApp.log"); - logger.info("try to parse the yarn 
app id from the engine err log file path: {}", errEngineLogPath); - File file = new File(errEngineLogPath); - if (file.exists()) - { - ecYarnAppKillService.execute(() -> { - BufferedReader in = null; - try { - in = new BufferedReader(new FileReader(errEngineLogPath)); - String line; - String regex = getYarnAppRegexByEngineType(engineConn); - if (StringUtils.isBlank(regex)) { - return; - } - Pattern pattern = Pattern.compile(regex); - List appIds = new ArrayList<>(); - while ((line = in.readLine()) != null) { - if (StringUtils.isNotBlank(line)) { - Matcher mApp = pattern.matcher(line); - if (mApp.find()) { - String candidate1 = mApp.group(mApp.groupCount()); - if (!appIds.contains(candidate1)) { - appIds.add(candidate1); - } - } - } - } - GovernanceUtils.killYarnJobApp(appIds); - logger.info("finished kill yarn app ids in the engine of ({}).", engineConnInstance); - } catch (IOException ioEx) { - if (ioEx instanceof FileNotFoundException) { - logger.error("the engine log file {} not found.", errEngineLogPath); - } else { - logger.error("the engine log file parse failed. 
the reason is {}", ioEx.getMessage()); - } - } finally { - IOUtils.closeQuietly(in); - } - }); - } - } - - private String getYarnAppRegexByEngineType(EngineConn engineConn) { - List> labels = engineConn.getLabels(); - String engineType = ""; - if (labels != null && !labels.isEmpty()) { - Optional labelOptional = labels.stream().filter(label -> label instanceof EngineTypeLabel) - .map(label -> (EngineTypeLabel) label).findFirst(); - if (labelOptional.isPresent()) { - EngineTypeLabel engineTypeLabel = labelOptional.get(); - engineType = engineTypeLabel.getEngineType(); - } - } - if (StringUtils.isBlank(engineType)) { - return ""; - } - String regex; - switch (engineType) { - case "spark": - case "shell": - regex = EngineConnConf.SPARK_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); - break; - case "sqoop": - regex = EngineConnConf.SQOOP_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); - break; - case "hive": - regex = EngineConnConf.HIVE_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); - break; - default: - regex = ""; - } - return regex; - } - - private EngineConn getEngineConnByServiceInstance(ServiceInstance serviceInstance) { - if (null == serviceInstance) { - return null; - } - List engineConnList = engineConnListService.getEngineConns(); - for (EngineConn engineConn : engineConnList) { - if (null != engineConn && serviceInstance.equals(engineConn.getServiceInstance())) { - return engineConn; - } - } - return null; - } - - private boolean killEngineConnByPid(EngineConn engineConn) { - logger.info("try to kill {} toString with pid({}).", engineConn.getServiceInstance().toString(), engineConn.getPid()); - if (StringUtils.isNotBlank(engineConn.getPid())) { - if (ECMConfiguration.ECM_PROCESS_SCRIPT_KILL()) { - GovernanceUtils.killProcess(engineConn.getPid(), engineConn.getServiceInstance().toString(), true); - } else { - killProcessByKillCmd(engineConn.getPid(), engineConn.getServiceInstance().toString()); - } - if (isProcessAlive(engineConn.getPid())) { - 
return false; - } else { - return true; - } - } else { - logger.warn("cannot kill {} with empty pid.", engineConn.getServiceInstance().toString()); - return false; - } - } - - private boolean isProcessAlive(String pid) { - String findCmd = "ps -ef | grep " + pid + " | grep EngineConnServer | awk '{print \"exists_\"$2}' | grep " + pid; - List cmdList = new ArrayList<>(); - cmdList.add("bash"); - cmdList.add("-c"); - cmdList.add(findCmd); - try { - String rs = Utils.exec(cmdList.toArray(new String[0]), 5000L); - return null != rs && rs.contains("exists_" + pid); - } catch (Exception e) { - // todo when thread catch exception , it should not be return false - logger.warn("Method isProcessAlive failed, " + e.getMessage()); - return false; - } - } - - private void killProcessByKillCmd(String pid, String desc ) { - String k15cmd = "sudo kill " + pid; - String k9cmd = "sudo kill -9 " + pid; - int tryNum = 0; - try { - while (isProcessAlive(pid) && tryNum <= 3) { - logger.info("{} still alive with pid({}), use shell command to kill it. try {}++", desc, pid, tryNum++); - if (tryNum <= 3) { - Utils.exec(k15cmd.split(" "), 3000L); - } else { - logger.info("{} still alive with pid({}). try {}, use shell command to kill -9 it", desc, pid, tryNum); - Utils.exec(k9cmd.split(" "), 3000L); - } - Thread.sleep(5000); - } - } catch (InterruptedException e) { - logger.error("Interrupted while killing engine {} with pid({})." 
+ desc, pid); - } - } -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnListService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnListService.scala deleted file mode 100644 index 4b9a59b4d7c..00000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnListService.scala +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service.impl - -import org.apache.linkis.DataWorkCloudApplication -import org.apache.linkis.common.utils.{Logging, Utils} -import org.apache.linkis.ecm.core.engineconn.{EngineConn, YarnEngineConn} -import org.apache.linkis.ecm.core.launch.EngineConnLaunchRunner -import org.apache.linkis.ecm.core.listener.{ECMEvent, ECMEventListener} -import org.apache.linkis.ecm.server.LinkisECMApplication -import org.apache.linkis.ecm.server.converter.ECMEngineConverter -import org.apache.linkis.ecm.server.listener._ -import org.apache.linkis.ecm.server.service.EngineConnListService -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.entity.resource.{Resource, ResourceType} -import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest - -import org.apache.commons.lang3.StringUtils - -import java.util -import java.util.concurrent.ConcurrentHashMap - -import scala.collection.JavaConverters._ - -import com.google.common.collect.Interners - -class DefaultEngineConnListService - extends EngineConnListService - with ECMEventListener - with Logging { - - /** - * key:tickedId,value :engineConn - */ - private val engineConnMap = new ConcurrentHashMap[String, EngineConn] - - private var engineConnKillService: DefaultEngineConnKillService = _ - - val lock = Interners.newWeakInterner[String] - - override def init(): Unit = {} - - override def getEngineConn(engineConnId: String): Option[EngineConn] = Option( - engineConnMap.get(engineConnId) - ) - - override def getEngineConns: util.List[EngineConn] = - new util.ArrayList[EngineConn](engineConnMap.values()) - - override def addEngineConn(engineConn: EngineConn): Unit = { - logger.info(s"add engineConn ${engineConn.getServiceInstance} to engineConnMap") - if (LinkisECMApplication.isReady) { - engineConnMap.put(engineConn.getTickedId, engineConn) - } - } - - override def killEngineConn(engineConnId: String): Unit = { - var conn = 
engineConnMap.get(engineConnId) - if (conn != null) engineConnId.intern().synchronized { - conn = engineConnMap.get(engineConnId) - if (conn != null) { - Utils.tryAndWarn { - if (NodeStatus.Failed == conn.getStatus && StringUtils.isNotBlank(conn.getPid)) { - killECByEngineConnKillService(conn) - } else { - getEngineConnKillService().killYarnAppIdOfOneEc(conn) - } - conn.close() - } - engineConnMap.remove(engineConnId) - logger.info(s"engineconn ${conn.getServiceInstance} was closed.") - } - } - } - - override def getUsedResources: Resource = engineConnMap - .values() - .asScala - .map(_.getResource.getMinResource) - .fold(Resource.initResource(ResourceType.Default))(_ + _) - - override def submit(runner: EngineConnLaunchRunner): Option[EngineConn] = { - None - } - - def updateYarnAppId(event: YarnAppIdCallbackEvent): Unit = { - updateYarnEngineConn( - x => x.setApplicationId(event.protocol.applicationId), - event.protocol.nodeId - ) - } - - def updateYarnEngineConn(implicit - updateFunction: YarnEngineConn => Unit, - nodeId: String - ): Unit = { - lock.intern(nodeId) synchronized { - engineConnMap.get(nodeId) match { - case e: YarnEngineConn => updateFunction(e) - case e: EngineConn => - engineConnMap.put(nodeId, ECMEngineConverter.engineConn2YarnEngineConn(e)) - } - } - } - - def updateEngineConn(updateFunction: EngineConn => Unit, nodeId: String): Unit = { - lock.intern(nodeId) synchronized { - engineConnMap.get(nodeId) match { - case e: EngineConn => updateFunction(e) - case _ => - } - } - } - - def updateYarnInfo(event: YarnInfoCallbackEvent): Unit = { - updateYarnEngineConn(x => x.setApplicationURL(event.protocol.uri), event.protocol.nodeId) - } - - def updatePid(event: EngineConnPidCallbackEvent): Unit = { - updateEngineConn( - x => { - x.setPid(event.protocol.pid) - x.setServiceInstance(event.protocol.serviceInstance) - }, - event.protocol.ticketId - ) - } - - def updateEngineConnStatus(tickedId: String, updateStatus: NodeStatus): Unit = { - 
updateEngineConn(x => x.setStatus(updateStatus), tickedId) - if (NodeStatus.isCompleted(updateStatus)) { - logger.info(s" from engineConnMap to remove engineconn ticketId ${tickedId}") - killEngineConn(tickedId) - } - } - - override def onEvent(event: ECMEvent): Unit = { - logger.info(s"Deal event $event") - event match { - case event: ECMClosedEvent => shutdownEngineConns(event) - case event: YarnAppIdCallbackEvent => updateYarnAppId(event) - case event: YarnInfoCallbackEvent => updateYarnInfo(event) - case event: EngineConnPidCallbackEvent => updatePid(event) - case EngineConnAddEvent(engineConn) => addEngineConn(engineConn) - case EngineConnStatusChangeEvent(tickedId, updateStatus) => - updateEngineConnStatus(tickedId, updateStatus) - case _ => - } - } - - private def getEngineConnKillService(): DefaultEngineConnKillService = { - if (engineConnKillService == null) { - val applicationContext = DataWorkCloudApplication.getApplicationContext - engineConnKillService = applicationContext.getBean(classOf[DefaultEngineConnKillService]) - } - engineConnKillService - } - - private def shutdownEngineConns(event: ECMClosedEvent): Unit = { - logger.info("start to kill all engines belonging the ecm") - engineConnMap - .values() - .asScala - .foreach(engineconn => { - killECByEngineConnKillService(engineconn) - }) - logger.info("Done! 
success to kill all engines belonging the ecm") - } - - private def killECByEngineConnKillService(engineconn: EngineConn): Unit = { - logger.info(s"start to kill ec by engineConnKillService ${engineconn.getServiceInstance}") - val engineStopRequest = new EngineStopRequest() - engineStopRequest.setServiceInstance(engineconn.getServiceInstance) - getEngineConnKillService().dealEngineConnStop(engineStopRequest) - } - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnPidCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnPidCallbackService.scala deleted file mode 100644 index 6fb2d4700d8..00000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnPidCallbackService.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service.impl - -import org.apache.linkis.common.utils.Logging -import org.apache.linkis.ecm.server.LinkisECMApplication -import org.apache.linkis.ecm.server.listener.EngineConnPidCallbackEvent -import org.apache.linkis.ecm.server.service.EngineConnPidCallbackService -import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid -import org.apache.linkis.rpc.message.annotation.Receiver - -class DefaultEngineConnPidCallbackService extends EngineConnPidCallbackService with Logging { - - @Receiver - override def dealPid(protocol: ResponseEngineConnPid): Unit = { - // 1.设置pid - // 2.设置serviceInstance - // 3.状态为running - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnPidCallbackEvent(protocol) - ) - } - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnStatusCallbackService.scala deleted file mode 100644 index af627afecef..00000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnStatusCallbackService.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ecm.server.service.impl - -import org.apache.linkis.common.utils.Logging -import org.apache.linkis.ecm.server.LinkisECMApplication -import org.apache.linkis.ecm.server.conf.ECMConfiguration.MANAGER_SERVICE_NAME -import org.apache.linkis.ecm.server.listener.EngineConnStatusChangeEvent -import org.apache.linkis.ecm.server.service.EngineConnStatusCallbackService -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus.{Failed, Running} -import org.apache.linkis.manager.common.protocol.engine.{ - EngineConnStatusCallback, - EngineConnStatusCallbackToAM -} -import org.apache.linkis.rpc.Sender -import org.apache.linkis.rpc.message.annotation.Receiver - -import org.springframework.stereotype.Service - -@Service -class DefaultEngineConnStatusCallbackService extends EngineConnStatusCallbackService with Logging { - - @Receiver - override def dealEngineConnStatusCallback(protocol: EngineConnStatusCallback): Unit = { - logger.info(s"Start to deal EngineConnStatusCallback $protocol") - - if (NodeStatus.isAvailable(protocol.status)) { - - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnStatusChangeEvent(protocol.ticketId, Running) - ) - } else { - - Sender - .getSender(MANAGER_SERVICE_NAME) - .send( - EngineConnStatusCallbackToAM( - protocol.serviceInstance, - protocol.status, - protocol.initErrorMsg - ) - ) - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - 
EngineConnStatusChangeEvent(protocol.ticketId, Failed) - ) - } - - logger.info(s"Finished to deal EngineConnStatusCallback $protocol") - } - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultYarnCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultYarnCallbackService.scala deleted file mode 100644 index 0bb2e1366ff..00000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultYarnCallbackService.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service.impl - -import org.apache.linkis.ecm.server.LinkisECMApplication -import org.apache.linkis.ecm.server.listener.{YarnAppIdCallbackEvent, YarnInfoCallbackEvent} -import org.apache.linkis.ecm.server.service.YarnCallbackService -import org.apache.linkis.protocol.callback.{YarnAPPIdCallbackProtocol, YarnInfoCallbackProtocol} -import org.apache.linkis.rpc.message.annotation.Receiver - -class DefaultYarnCallbackService extends YarnCallbackService { - - @Receiver - override def dealApplicationId(protocol: YarnAPPIdCallbackProtocol): Unit = { - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - YarnAppIdCallbackEvent(protocol) - ) - } - - @Receiver - override def dealApplicationURI(protocol: YarnInfoCallbackProtocol): Unit = { - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(YarnInfoCallbackEvent(protocol)) - } - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ECMListenerService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ECMListenerService.scala new file mode 100644 index 00000000000..764a704887e --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ECMListenerService.scala @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ecm.server.service.impl + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.ecm.core.listener.{ECMEvent, ECMEventListener} +import org.apache.linkis.ecm.server.listener.EngineConnStopEvent +import org.apache.linkis.ecm.server.service.EngineConnKillService +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus + +class ECMListenerService extends ECMEventListener with Logging { + + private var engineConnKillService: EngineConnKillService = _ + + override def onEvent(event: ECMEvent): Unit = event match { + case EngineConnStopEvent(engineConn, engineStopRequest) => + if (NodeStatus.Failed == engineConn.getStatus) { + logger.info("deal stopEvent to kill ec {}", engineStopRequest) + engineConnKillService.dealEngineConnStop(engineStopRequest) + } else { + if (engineConnKillService.isInstanceOf[DefaultEngineConnKillService]) { + logger.info("deal stopEvent to kill yarn app {}", engineStopRequest) + engineConnKillService + .asInstanceOf[DefaultEngineConnKillService] + .killYarnAppIdOfOneEc(engineStopRequest) + } + } + case _ => + } + + def getEngineConnKillService(): EngineConnKillService = { + engineConnKillService + } + + def setEngineConnKillService(engineConnKillService: EngineConnKillService): Unit = { + this.engineConnKillService = engineConnKillService + } + +} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala 
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala index ad58ba723fe..b20590f04a7 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala @@ -19,31 +19,28 @@ package org.apache.linkis.ecm.server.service.impl import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.utils.Utils -import org.apache.linkis.ecm.core.conf.ECMErrorCode import org.apache.linkis.ecm.core.engineconn.EngineConn import org.apache.linkis.ecm.core.launch.ProcessEngineConnLaunch -import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._ import org.apache.linkis.ecm.server.LinkisECMApplication import org.apache.linkis.ecm.server.conf.ECMConfiguration import org.apache.linkis.ecm.server.conf.ECMConfiguration.MANAGER_SERVICE_NAME -import org.apache.linkis.ecm.server.exception.ECMErrorException -import org.apache.linkis.ecm.server.listener.EngineConnStatusChangeEvent +import org.apache.linkis.ecm.server.listener.EngineConnStopEvent import org.apache.linkis.ecm.server.service.LocalDirsHandleService +import org.apache.linkis.ecm.utils.ECMCacheUtils +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} +import org.apache.linkis.manager.common.constant.AMConstant import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus._ -import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM +import org.apache.linkis.manager.common.protocol.engine.{ + EngineConnStatusCallbackToAM, + 
EngineStopRequest +} import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.rpc.Sender import org.apache.commons.io.IOUtils -import org.apache.commons.lang3.StringUtils -import org.apache.commons.lang3.exception.ExceptionUtils - -import java.util.concurrent.TimeUnit -import scala.concurrent.{Future, TimeoutException} -import scala.concurrent.duration.Duration +import scala.concurrent.Future abstract class ProcessEngineConnLaunchService extends AbstractEngineConnLaunchService { @@ -52,110 +49,94 @@ abstract class ProcessEngineConnLaunchService extends AbstractEngineConnLaunchSe def setLocalDirsHandleService(localDirsHandleService: LocalDirsHandleService): Unit = this.localDirsHandleService = localDirsHandleService - override def waitEngineConnStart( + override def startEngineConnMonitorStart( request: EngineConnLaunchRequest, - conn: EngineConn, - duration: Long + conn: EngineConn ): Unit = { conn.getEngineConnLaunchRunner.getEngineConnLaunch match { case launch: ProcessEngineConnLaunch => - Utils.tryCatch { - // Set the pid of the shell script before the pid callBack returns - launch.getPid().foreach(conn.setPid) - processMonitorThread(conn, launch, duration) - } { case e: Throwable => - val logPath = Utils.tryCatch(conn.getEngineConnManagerEnv.engineConnLogDirs) { t => - localDirsHandleService.getEngineConnLogDir( - request.user, - request.ticketId, - LabelUtil.getEngineType(request.labels) - ) - } - val canRetry = e match { - case ecmError: ECMErrorException => - if (ECMErrorCode.EC_START_TIME_OUT == ecmError.getErrCode) { - true - } else if (StringUtils.isBlank(ecmError.getDesc)) { - logger.info("exception desc is null, can be retry") - true - } else { - false - } - case _ => false - } - logger.warn( - s"Failed to init ${conn.getServiceInstance}, status shutting down, canRetry $canRetry, logPath $logPath", - e - ) - Sender - 
.getSender(MANAGER_SERVICE_NAME) - .send( - EngineConnStatusCallbackToAM( - conn.getServiceInstance, - NodeStatus.ShuttingDown, - "Failed to start EngineConn, reason: " + ExceptionUtils.getRootCauseMessage( - e - ) + s"\n You can go to this path($logPath) to find the reason or ask the administrator for help", - canRetry - ) - ) - throw e - } + launch.getPid().foreach(conn.setPid) + processMonitorThread(conn, launch) case _ => } } private def processMonitorThread( engineConn: EngineConn, - launch: ProcessEngineConnLaunch, - timeout: Long + launch: ProcessEngineConnLaunch ): Unit = { - val isCompleted: EngineConn => Boolean = engineConn => - engineConn.getStatus == Success || engineConn.getStatus == Failed - val tickedId = engineConn.getTickedId - val errorMsg = new StringBuilder Future { - val iterator = - IOUtils.lineIterator(launch.getProcessInputStream, Configuration.BDP_ENCODING.getValue) - var count = 0 - val maxLen = ECMConfiguration.ENGINE_START_ERROR_MSG_MAX_LEN.getValue - while (!isCompleted(engineConn) && iterator.hasNext && count < maxLen) { - val line = iterator.next() - errorMsg.append(line).append("\n") - count += 1 - } - val exitCode = Option(launch.processWaitFor) - if (exitCode.exists(_ != 0)) { - logger.info(s"engine ${tickedId} process exit ") - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnStatusChangeEvent(tickedId, ShuttingDown) - ) - } else { - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnStatusChangeEvent(tickedId, Success) - ) - } - } - Utils.tryThrow( - Utils - .waitUntil(() => engineConn.getStatus != Starting, Duration(timeout, TimeUnit.MILLISECONDS)) - ) { - case e: TimeoutException => - throw new ECMErrorException( - EC_START_TIME_OUT.getErrorCode, - EC_START_TIME_OUT.getErrorDesc + s" $engineConn ." 
- ) - case e: InterruptedException => // 比如被ms cancel - throw new ECMErrorException( - EC_INTERRUPT_TIME_OUT.getErrorCode, - EC_INTERRUPT_TIME_OUT.getErrorDesc + s" $engineConn ." + val tickedId = engineConn.getTickedId + val errorMsg = new StringBuilder + val taskId = + JobUtils.getJobIdFromStringMap(launch.getEngineConnLaunchRequest.creationDesc.properties) + LoggerUtils.setJobIdMDC(taskId) + Utils.tryAndWarnMsg { + val iterator = + IOUtils.lineIterator(launch.getProcessInputStream, Configuration.BDP_ENCODING.getValue) + var count = 0 + val maxLen = ECMConfiguration.ENGINE_START_ERROR_MSG_MAX_LEN.getValue + while (launch.isAlive && iterator.hasNext && count < maxLen) { + val line = iterator.next() + errorMsg.append(line).append("\n") + count += 1 + } + val exitCode = launch.processWaitFor + val engineType = LabelUtil.getEngineType(launch.getEngineConnLaunchRequest.labels) + val logPath = Utils.tryCatch(engineConn.getEngineConnManagerEnv.engineConnLogDirs) { t => + localDirsHandleService.getEngineConnLogDir( + launch.getEngineConnLaunchRequest.user, + tickedId, + engineType + ) + } + if (exitCode != 0) { + val stopRequest = ECMCacheUtils.getStopEC(engineConn.getServiceInstance) + if ( + null != stopRequest && engineConn.getPid != null && engineConn.getPid.equals( + stopRequest.getIdentifier + ) + ) { + logger.info( + s"EC ${engineConn.getServiceInstance} exit should by kill stop request $stopRequest, do not report status" + ) + } else { + val canRetry = + if (errorMsg.isEmpty || ECMConfiguration.EC_CAN_RETRY_EXIT_CODES.contains(exitCode)) { + true + } else { + false + } + logger.warn( + s"Failed to start ec ${engineConn.getServiceInstance}, status shutting down exit code ${exitCode}, canRetry ${canRetry}, logPath ${logPath}" + ) + Sender + .getSender(MANAGER_SERVICE_NAME) + .send( + EngineConnStatusCallbackToAM( + engineConn.getServiceInstance, + NodeStatus.ShuttingDown, + "Failed to start EngineConn, reason: " + errorMsg + s"\n You can go to this path($logPath) 
to find the reason or ask the administrator for help", + canRetry + ) + ) + } + engineConn.setStatus(NodeStatus.ShuttingDown) + } else { + engineConn.setStatus(NodeStatus.Success) + } + val engineStopRequest = new EngineStopRequest + engineStopRequest.setEngineType(engineType) + engineStopRequest.setUser(launch.getEngineConnLaunchRequest.user) + engineStopRequest.setIdentifier(engineConn.getPid) + engineStopRequest.setIdentifierType(AMConstant.PROCESS_MARK) + engineStopRequest.setLogDirSuffix(logPath) + engineStopRequest.setServiceInstance(engineConn.getServiceInstance) + LinkisECMApplication.getContext.getECMAsyncListenerBus.post( + EngineConnStopEvent(engineConn, engineStopRequest) ) - case t: Throwable => - logger.error(s"unexpected error, now shutdown it.") - throw t - } - if (engineConn.getStatus == ShuttingDown) { - throw new ECMErrorException(EC_START_FAILED.getErrorCode, errorMsg.toString()) + } { s"EngineConns: ${engineConn.getServiceInstance} monitor Failed" } + LoggerUtils.removeJobIdMDC() } } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala index 4a684bbec1d..ec65cd885f1 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala @@ -19,7 +19,7 @@ package org.apache.linkis.ecm.server.spring import org.apache.linkis.ecm.core.listener.ECMEventListener import org.apache.linkis.ecm.server.context.{DefaultECMContext, ECMContext} -import 
org.apache.linkis.ecm.server.service._ +import org.apache.linkis.ecm.server.service.{EngineConnKillService, _} import org.apache.linkis.ecm.server.service.impl._ import org.springframework.beans.factory.annotation.Autowired @@ -39,16 +39,9 @@ class ECMSpringConfiguration { new DefaultECMContext } - @Bean - @ConditionalOnMissingBean - def getDefaultYarnCallbackService: YarnCallbackService = { - new DefaultYarnCallbackService - } - @Bean @ConditionalOnMissingBean def getBmlResourceLocalizationService( - context: ECMContext, localDirsHandleService: LocalDirsHandleService ): ResourceLocalizationService = { val service: BmlResourceLocalizationService = new BmlResourceLocalizationService @@ -57,32 +50,12 @@ class ECMSpringConfiguration { service } - @Bean - @ConditionalOnMissingBean - def getDefaultLogCallbackService: LogCallbackService = { - null - } - @Bean @ConditionalOnMissingBean def getDefaultlocalDirsHandleService: LocalDirsHandleService = { new DefaultLocalDirsHandleService } - @Bean - @ConditionalOnMissingBean - def getDefaultEngineConnPidCallbackService: EngineConnPidCallbackService = { - new DefaultEngineConnPidCallbackService - } - - @Bean - @ConditionalOnMissingBean - def getDefaultEngineConnListService(context: ECMContext): EngineConnListService = { - implicit val service: DefaultEngineConnListService = new DefaultEngineConnListService - registerSyncListener(context) - service - } - @Bean @ConditionalOnMissingBean def getLinuxProcessEngineConnLaunchService( @@ -98,38 +71,44 @@ class ECMSpringConfiguration { @Bean @ConditionalOnMissingBean def getDefaultECMRegisterService(context: ECMContext): ECMRegisterService = { - implicit val service: DefaultECMRegisterService = new DefaultECMRegisterService - registerSyncListener(context) + val service: DefaultECMRegisterService = new DefaultECMRegisterService + registerSyncListener(context, service) service } @Bean @ConditionalOnMissingBean def getDefaultECMHealthService(context: ECMContext): ECMHealthService = { - 
implicit val service: DefaultECMHealthService = new DefaultECMHealthService - registerSyncListener(context) + val service: DefaultECMHealthService = new DefaultECMHealthService + registerSyncListener(context, service) service } @Bean @ConditionalOnMissingBean def getDefaultEngineConnKillService( - engineConnListService: EngineConnListService ): EngineConnKillService = { val service = new DefaultEngineConnKillService - service.setEngineConnListService(engineConnListService) service } - private def registerSyncListener( + @Bean + @ConditionalOnMissingBean + def getECMListenerService( + engineConnKillService: EngineConnKillService, context: ECMContext - )(implicit listener: ECMEventListener): Unit = { + ): ECMListenerService = { + val service: ECMListenerService = new ECMListenerService + service.setEngineConnKillService(engineConnKillService) + registerASyncListener(context, service) + service + } + + private def registerSyncListener(context: ECMContext, listener: ECMEventListener): Unit = { context.getECMSyncListenerBus.addListener(listener) } - private def registerASyncListener( - context: ECMContext - )(implicit listener: ECMEventListener): Unit = { + private def registerASyncListener(context: ECMContext, listener: ECMEventListener): Unit = { context.getECMAsyncListenerBus.addListener(listener) } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala index 08addb94c19..2a50b40405c 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala @@ 
-102,8 +102,7 @@ object ECMUtils extends Logging { // if enable estimate actual memory if (ECM_STIMATE_ACTUAL_MEMORY_ENABLE) { - // 90% - val totalByte = (HardwareUtils.getMaxMemory() * 0.9).asInstanceOf[Long] + val totalByte = HardwareUtils.getMaxMemory() val resultMemory = math.max(totalByte, ECM_PROTECTED_MEMORY) // max of PhysicalMemory or ECM_PROTECTED_MEMORY diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml index 89aeec71cd8..020e094f811 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml +++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml @@ -41,7 +41,7 @@ org.apache.linkis - linkis-bml-client + linkis-pes-client ${project.version} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/hook/OnceEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/hook/OnceEngineConnHook.scala index cd4029bae66..2586576bb72 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/hook/OnceEngineConnHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/hook/OnceEngineConnHook.scala @@ -28,11 +28,6 @@ import org.apache.linkis.manager.common.entity.enumeration.NodeStatus */ class OnceEngineConnHook extends CallbackEngineConnHook { - override protected def getNodeStatusOfStartSuccess( - engineCreationContext: 
EngineCreationContext, - engineConn: EngineConn - ): NodeStatus = NodeStatus.Unlock - override def afterEngineServerStartSuccess( engineCreationContext: EngineCreationContext, engineConn: EngineConn diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml index 8d904174b80..a83d19b1e01 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml @@ -30,7 +30,7 @@ org.apache.linkis - linkis-udf-client + linkis-pes-rpc-client ${project.version} @@ -60,18 +60,6 @@ provided - - org.apache.linkis - linkis-cs-client - ${project.version} - - - org.reflections - reflections - - - - org.apache.linkis linkis-computation-governance-common @@ -80,7 +68,7 @@ org.apache.linkis - linkis-bml-client + linkis-pes-client ${project.version} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/HardwareMonitorService.java b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/HardwareMonitorService.java index 20e3946f276..d444b63bbe9 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/HardwareMonitorService.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/HardwareMonitorService.java @@ -45,7 +45,7 @@ public boolean isAvailable() { double memoryUsage = HardwareUtils.memoryUsage(); - double loadAverageUsage = HardwareUtils.memoryUsage(); + double loadAverageUsage = HardwareUtils.loadAverageUsage(); 
Double maxMemoryUsage = MEMORY_MAX_USAGE.getValue(); Double maxCpuUsage = CPU_MAX_USAGE.getValue(); diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java index de6bb440dd1..55a383f070f 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java @@ -55,7 +55,7 @@ public class TimingMonitorService implements InitializingBean, Runnable { @Override public void afterPropertiesSet() throws Exception { - if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM()) { + if ((Boolean) (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM().getValue())) { Utils.defaultScheduler() .scheduleAtFixedRate( this, 3 * 60 * 1000, MONITOR_INTERVAL.getValue().toLong(), TimeUnit.MILLISECONDS); @@ -77,17 +77,17 @@ public void run() { } } if (null == concurrentExecutor) { - LOG.warn("shell executor can not is null"); + LOG.warn("Executor can not is null"); return; } - isAvailable = true; + isAvailable = true; // NOSONAR monitorServiceList.forEach( monitorService -> { if (!monitorService.isAvailable()) { isAvailable = false; } }); - if (isAvailable) { + if (isAvailable) { // NOSONAR if (concurrentExecutor.isBusy()) synchronized (EXECUTOR_STATUS_LOCKER) { LOG.info("monitor turn to executor status from busy to unlock"); @@ -96,7 +96,7 @@ public void run() { } else { if (concurrentExecutor.isIdle()) synchronized (EXECUTOR_STATUS_LOCKER) { - 
LOG.info("monitor turn to executor status from busy to unlock"); + LOG.info("monitor turn to executor status from unlock to busy"); concurrentExecutor.transition(NodeStatus.Busy); } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/executor/exception/HookExecuteException.java b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/executor/exception/HookExecuteException.java new file mode 100644 index 00000000000..4d1fbbfe409 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/executor/exception/HookExecuteException.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconn.computation.executor.exception; + +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + +public class HookExecuteException extends LinkisRuntimeException { + + public HookExecuteException(int errCode, String desc) { + super(errCode, desc); + } + + public HookExecuteException(int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala index cf8b9c00b8a..9af394da80a 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala @@ -20,7 +20,10 @@ package org.apache.linkis.engineconn.computation.executor.async import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.Utils +import org.apache.linkis.engineconn.acessible.executor.info.DefaultNodeHealthyInfoManager import org.apache.linkis.engineconn.acessible.executor.listener.event.TaskResponseErrorEvent +import org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils +import 
org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils.currentEngineIsUnHealthy import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.{ @@ -29,13 +32,16 @@ import org.apache.linkis.engineconn.computation.executor.execute.{ } import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.ConcurrentExecutor import org.apache.linkis.engineconn.executor.listener.{ EngineConnSyncListenerBus, ExecutorListenerBusContext } import org.apache.linkis.governance.common.entity.ExecutionNodeStatus -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel import org.apache.linkis.protocol.engine.JobProgressInfo import org.apache.linkis.scheduler.executer._ import org.apache.linkis.scheduler.listener.JobListener @@ -48,6 +54,8 @@ import org.apache.commons.lang3.exception.ExceptionUtils import java.util import java.util.concurrent.ConcurrentHashMap +import DataWorkCloudApplication.getApplicationContext + abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: Int = 1000) extends ComputationExecutor(outputPrintLimit) with ConcurrentExecutor @@ -97,9 +105,6 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: }) } { e => logger.info("failed to do with hook", e) - engineExecutionContext.appendStdout( - LogUtils.generateWarn(s"failed execute hook: ${ExceptionUtils.getStackTrace(e)}") - ) } if (hookedCode.length > 100) { 
logger.info(s"hooked after code: ${hookedCode.substring(0, 100)} ....") @@ -207,6 +212,7 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: s"Executor is busy but still got new task ! Running task num : ${getRunningTask}" ) } + runningTasks.increase() if (getRunningTask >= getConcurrentLimit) synchronized { if (getRunningTask >= getConcurrentLimit && NodeStatus.isIdle(getStatus)) { logger.info( @@ -215,13 +221,25 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: transition(NodeStatus.Busy) } } - runningTasks.increase() } override def onJobCompleted(job: Job): Unit = { + runningTasks.decrease() job match { case asyncEngineConnJob: AsyncEngineConnJob => + val jobId = JobUtils.getJobIdFromMap(asyncEngineConnJob.getEngineConnTask.getProperties) + LoggerUtils.setJobIdMDC(jobId) + + if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) synchronized { + if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) { + logger.info( + s"running task($getRunningTask) < concurrent limit $getConcurrentLimit, now to mark engine to Unlock " + ) + transition(NodeStatus.Unlock) + } + } + job.getState match { case Succeed => succeedTasks.increase() @@ -241,22 +259,42 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: } removeJob(asyncEngineConnJob.getEngineConnTask.getTaskId) clearTaskCache(asyncEngineConnJob.getEngineConnTask.getTaskId) - + // execute once should try to shutdown + if ( + asyncEngineConnJob.getEngineConnTask.getLables.exists(_.isInstanceOf[ExecuteOnceLabel]) + ) { + if (!hasTaskRunning()) { + logger.warn( + s"engineConnTask(${asyncEngineConnJob.getEngineConnTask.getTaskId}) is execute once, now to mark engine to Finished" + ) + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } + // unhealthy node should try to shutdown + if (!hasTaskRunning() && currentEngineIsUnHealthy()) { + logger.info( + 
s"engineConnTask(${asyncEngineConnJob.getEngineConnTask.getTaskId}) is unHealthy, now to mark engine to Finished" + ) + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + LoggerUtils.setJobIdMDC(jobId) case _ => } - if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) synchronized { - if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) { - logger.info( - s"running task($getRunningTask) < concurrent limit $getConcurrentLimit, now to mark engine to Unlock " - ) - transition(NodeStatus.Unlock) - } - } } override def hasTaskRunning(): Boolean = { getRunningTask > 0 } + override def transition(toStatus: NodeStatus): Unit = { + if (getRunningTask >= getConcurrentLimit && NodeStatus.Unlock == toStatus) { + logger.info( + s"running task($getRunningTask) > concurrent limit:$getConcurrentLimit, can not to mark EC to Unlock" + ) + return + } + super.transition(toStatus) + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncEngineConnJob.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncEngineConnJob.scala index 8876a50c377..6f73f67fe65 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncEngineConnJob.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncEngineConnJob.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.computation.executor.async import org.apache.linkis.common.utils.Utils import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext +import 
org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} import org.apache.linkis.scheduler.executer.{ CompletedExecuteResponse, ErrorExecuteResponse, @@ -27,6 +28,7 @@ import org.apache.linkis.scheduler.executer.{ SuccessExecuteResponse } import org.apache.linkis.scheduler.queue.{Job, JobInfo} +import org.apache.linkis.scheduler.queue.SchedulerEventState.SchedulerEventState class AsyncEngineConnJob(task: EngineConnTask, engineExecutionContext: EngineExecutionContext) extends Job { @@ -47,6 +49,14 @@ class AsyncEngineConnJob(task: EngineConnTask, engineExecutionContext: EngineExe override def close(): Unit = {} + override def transition(state: SchedulerEventState): Unit = Utils.tryFinally { + val jobId = JobUtils.getJobIdFromMap(task.getProperties) + LoggerUtils.setJobIdMDC(jobId) + super.transition(state) + } { + LoggerUtils.removeJobIdMDC() + } + override def transitionCompleted(executeCompleted: CompletedExecuteResponse): Unit = { var executeCompletedNew: CompletedExecuteResponse = executeCompleted executeCompleted match { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncExecuteExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncExecuteExecutor.scala index a27d3f029ac..46332b93fd3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncExecuteExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncExecuteExecutor.scala @@ -17,8 +17,10 @@ package org.apache.linkis.engineconn.computation.executor.async +import org.apache.linkis.common.utils.Utils import 
org.apache.linkis.engineconn.common.exception.EngineConnException import org.apache.linkis.engineconn.computation.executor.utlis.ComputationErrorCode +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} import org.apache.linkis.scheduler.executer._ import org.apache.linkis.scheduler.executer.ExecutorState.ExecutorState @@ -31,10 +33,16 @@ class AsyncExecuteExecutor(executor: AsyncConcurrentComputationExecutor) extends override def execute(executeRequest: ExecuteRequest): ExecuteResponse = { executeRequest match { case asyncExecuteRequest: AsyncExecuteRequest => - executor.asyncExecuteTask( - asyncExecuteRequest.task, - asyncExecuteRequest.engineExecutionContext - ) + Utils.tryFinally { + val jobId = JobUtils.getJobIdFromMap(asyncExecuteRequest.task.getProperties) + LoggerUtils.setJobIdMDC(jobId) + executor.asyncExecuteTask( + asyncExecuteRequest.task, + asyncExecuteRequest.engineExecutionContext + ) + } { + LoggerUtils.removeJobIdMDC() + } case _ => throw EngineConnException( ComputationErrorCode.ASYNC_EXECUTOR_ERROR_CODE, diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala index 3959eb942bd..98f04daaa2b 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.computation.executor.bml import org.apache.linkis.bml.client.{BmlClient, BmlClientFactory} import 
org.apache.linkis.common.exception.ErrorException import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook @@ -62,7 +63,7 @@ class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging { ): String = { val props = engineExecutionContext.getProperties if (null != props && props.containsKey(GovernanceConstant.TASK_RESOURCES_STR)) { - val workDir = ComputationEngineUtils.getCurrentWorkDir +// val workDir = ComputationEngineUtils.getCurrentWorkDir val jobId = engineExecutionContext.getJobId props.get(GovernanceConstant.TASK_RESOURCES_STR) match { case resources: util.List[Object] => @@ -71,9 +72,9 @@ class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging { val fileName = resource.get(GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR).toString val resourceId = resource.get(GovernanceConstant.TASK_RESOURCE_ID_STR).toString val version = resource.get(GovernanceConstant.TASK_RESOURCE_VERSION_STR).toString - val fullPath = - if (workDir.endsWith(seperator)) pathType + workDir + fileName - else pathType + workDir + seperator + fileName + val fullPath = fileName +// if (workDir.endsWith(seperator)) pathType + workDir + fileName +// else pathType + workDir + seperator + fileName val response = Utils.tryCatch { bmlClient.downloadShareResource(processUser, resourceId, version, fullPath, true) } { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala index fec2fe5e7ba..a6c055d4ecb 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala @@ -33,6 +33,12 @@ object ComputationExecutorConf { "Maximum number of tasks executed by the synchronization EC" ) + val PRINT_TASK_PARAMS_SKIP_KEYS = CommonVars( + "linkis.engineconn.print.task.params.skip.keys", + "jobId,wds.linkis.rm.yarnqueue", + "skip to print params key at job logs" + ) + val ENGINE_PROGRESS_FETCH_INTERVAL = CommonVars( "wds.linkis.engineconn.progresss.fetch.interval-in-seconds", @@ -54,7 +60,7 @@ object ComputationExecutorConf { ).getValue val ENGINE_CONCURRENT_THREAD_NUM = CommonVars( - "wds.linkis.engineconn.concurrent.thread.num", + "linkis.engineconn.concurrent.thread.num", 20, "Maximum thread pool of the concurrent EC" ) @@ -118,4 +124,35 @@ object ComputationExecutorConf { val TASK_SUBMIT_WAIT_TIME_MS = CommonVars("linkis.ec.task.submit.wait.time.ms", 2L, "Task submit wait time(ms)").getValue + val ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_ENABLED = + CommonVars("linkis.ec.send.log.entrance.limit.enabled", true) + + val ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_LENGTH = + CommonVars("linkis.ec.send.log.entrance.limit.length", 2000) + + val ENGINE_KERBEROS_AUTO_REFRESH_ENABLED = + CommonVars("linkis.ec.kerberos.auto.refresh.enabled", false).getValue + + val CLOSE_RS_OUTPUT_WHEN_RESET_BY_DEFAULT_ENABLED = + CommonVars("linkis.ec.rs.close.when.reset.enabled", true).getValue + + val SPECIAL_UDF_CHECK_ENABLED = + CommonVars("linkis.ec.spacial.udf.check.enabled", false) + + val 
SPECIAL_UDF_CHECK_BY_REGEX_ENABLED = + CommonVars("linkis.ec.spacial.udf.check.by.regex.enabled", false) + + val SPECIAL_UDF_NAMES = + CommonVars("linkis.ec.spacial.udf.check.names", "") + + val SUPPORT_SPECIAL_UDF_LANGUAGES = + CommonVars("linkis.ec.support.spacial.udf.languages", "sql,python") + + val ONLY_SQL_USE_UDF_KEY = "load.only.sql.use.udf" + + val CODE_TYPE = "codeType" + + val SUPPORT_PARTIAL_RETRY_FOR_FAILED_TASKS_ENABLED: Boolean = + CommonVars[Boolean]("linkis.partial.retry.for.failed.task.enabled", false).getValue + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala index f59adaadeff..fe98e3328e2 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala @@ -17,7 +17,10 @@ package org.apache.linkis.engineconn.computation.executor.cs +import org.apache.linkis.common.utils.Logging import org.apache.linkis.cs.client.service.CSResourceService +import org.apache.linkis.engineconn.common.conf.EngineConnConf +import org.apache.linkis.governance.common.utils.GovernanceConstant import org.apache.commons.lang3.StringUtils @@ -27,7 +30,7 @@ import java.util.regex.Pattern import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer -class CSResourceParser { +class CSResourceParser extends Logging { private val pb = Pattern.compile("cs://[^\\s\"]+[$\\s]{0,1}", Pattern.CASE_INSENSITIVE) @@ -47,7 +50,6 @@ class CSResourceParser { nodeNameStr: String 
): String = { - // TODO getBMLResource val bmlResourceList = CSResourceService.getInstance().getUpstreamBMLResource(contextIDValueStr, nodeNameStr) @@ -56,23 +58,25 @@ class CSResourceParser { val preFixNames = new ArrayBuffer[String]() val parsedNames = new ArrayBuffer[String]() + val prefixName = System.currentTimeMillis().toString + "_" preFixResourceNames.foreach { preFixResourceName => val resourceName = preFixResourceName.replace(PREFIX, "").trim val bmlResourceOption = bmlResourceList.asScala.find(_.getDownloadedFileName.equals(resourceName)) if (bmlResourceOption.isDefined) { + val replacementName = EngineConnConf.getEngineTmpDir + prefixName + resourceName val bmlResource = bmlResourceOption.get val map = new util.HashMap[String, Object]() - map.put("resourceId", bmlResource.getResourceId) - map.put("version", bmlResource.getVersion) - map.put("fileName", resourceName) + map.put(GovernanceConstant.TASK_RESOURCE_ID_STR, bmlResource.getResourceId) + map.put(GovernanceConstant.TASK_RESOURCE_VERSION_STR, bmlResource.getVersion) + map.put(GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR, replacementName) parsedResources.add(map) preFixNames.append(preFixResourceName) - parsedNames.append(resourceName) + parsedNames.append(replacementName) + logger.warn(s"Replace cs file from {$preFixResourceName} to {$replacementName}") } - } - props.put("resources", parsedResources) + props.put(GovernanceConstant.TASK_RESOURCES_STR, parsedResources) StringUtils.replaceEach(code, preFixNames.toArray, parsedNames.toArray) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala index 780db4215a7..9fb241a791c 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala @@ -21,10 +21,17 @@ import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor -import org.apache.linkis.engineconn.acessible.executor.listener.event.TaskStatusChangedEvent +import org.apache.linkis.engineconn.acessible.executor.listener.event.{ + TaskLogUpdateEvent, + TaskResponseErrorEvent, + TaskStatusChangedEvent +} +import org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils.currentEngineIsUnHealthy import org.apache.linkis.engineconn.common.conf.{EngineConnConf, EngineConnConstant} import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf +import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf.SUPPORT_PARTIAL_RETRY_FOR_FAILED_TASKS_ENABLED import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask +import org.apache.linkis.engineconn.computation.executor.exception.HookExecuteException import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook import org.apache.linkis.engineconn.computation.executor.metrics.ComputationEngineConnMetrics import org.apache.linkis.engineconn.computation.executor.upstream.event.TaskStatusChangedForUpstreamMonitorEvent @@ -35,17 +42,22 @@ import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.governance.common.paser.CodeParser import 
org.apache.linkis.governance.common.protocol.task.{EngineConcurrentInfo, RequestTask} +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel +import org.apache.linkis.manager.label.entity.engine.{EngineType, UserCreatorLabel} +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.protocol.engine.JobProgressInfo import org.apache.linkis.scheduler.executer._ import org.apache.commons.lang3.StringUtils import org.apache.commons.lang3.exception.ExceptionUtils +import java.util import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger +import scala.collection.JavaConverters._ + import com.google.common.cache.{Cache, CacheBuilder} abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) @@ -77,9 +89,11 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) protected val failedTasks: Count = new Count - private var lastTask: EngineConnTask = _ + protected var lastTask: EngineConnTask = _ - private val MAX_TASK_EXECUTE_NUM = ComputationExecutorConf.ENGINE_MAX_TASK_EXECUTE_NUM.getValue + private val MAX_TASK_EXECUTE_NUM = ComputationExecutorConf.ENGINE_MAX_TASK_EXECUTE_NUM.getValue( + EngineConnObject.getEngineCreationContext.getOptions + ) private val CLOSE_LOCKER = new Object @@ -131,6 +145,12 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) override def close(): Unit = { if (null != lastTask) CLOSE_LOCKER.synchronized { + listenerBusContext.getEngineConnSyncListenerBus.postToAll( + TaskLogUpdateEvent( + lastTask.getTaskId, + LogUtils.generateERROR("EC exits unexpectedly and actively kills the task") + ) + ) killTask(lastTask.getTaskId) } else { @@ -151,6 +171,11 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) engineConnTask: EngineConnTask, executeResponse: ExecuteResponse ): Unit = { + 
Utils.tryAndWarn { + ComputationExecutorHook.getComputationExecutorHooks.foreach { hook => + hook.afterExecutorExecute(engineConnTask, executeResponse) + } + } val executorNumber = getSucceedNum + getFailedNum if ( MAX_TASK_EXECUTE_NUM > 0 && runningTasks @@ -159,6 +184,13 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) logger.error(s"Task has reached max execute number $MAX_TASK_EXECUTE_NUM, now tryShutdown. ") ExecutorManager.getInstance.getReportExecutor.tryShutdown() } + + // unhealthy node should try to shutdown + if (runningTasks.getCount() == 0 && currentEngineIsUnHealthy) { + logger.info("no task running and ECNode is unHealthy, now to mark engine to Finished.") + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } def toExecuteTask( @@ -170,25 +202,47 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) Utils.tryFinally { transformTaskStatus(engineConnTask, ExecutionNodeStatus.Running) val engineExecutionContext = createEngineExecutionContext(engineConnTask) + + val engineCreationContext = EngineConnObject.getEngineCreationContext + var hookedCode = engineConnTask.getCode Utils.tryCatch { - val engineCreationContext = EngineConnObject.getEngineCreationContext ComputationExecutorHook.getComputationExecutorHooks.foreach(hook => { hookedCode = hook.beforeExecutorExecute(engineExecutionContext, engineCreationContext, hookedCode) }) - }(e => logger.info("failed to do with hook", e)) + } { e => + e match { + case hookExecuteException: HookExecuteException => + failedTasks.increase() + logger.error("failed to do with hook", e) + return ErrorExecuteResponse("hook execute failed task will be failed", e) + case _ => logger.info("failed to do with hook", e) + } + } if (hookedCode.length > 100) { logger.info(s"hooked after code: ${hookedCode.substring(0, 100)} ....") } else { logger.info(s"hooked after code: $hookedCode ") } + + // task params log + // spark engine: at 
org.apache.linkis.engineplugin.spark.executor.SparkEngineConnExecutor.executeLine log special conf + Utils.tryAndWarn { + val engineType = LabelUtil.getEngineType(engineCreationContext.getLabels()) + EngineType.mapStringToEngineType(engineType) match { + case EngineType.HIVE | EngineType.TRINO => printTaskParamsLog(engineExecutionContext) + case _ => + } + } + val localPath = EngineConnConf.getLogDir engineExecutionContext.appendStdout( LogUtils.generateInfo( s"EngineConn local log path: ${DataWorkCloudApplication.getServiceInstance.toString} $localPath" ) ) + var response: ExecuteResponse = null val incomplete = new StringBuilder val codes = @@ -197,37 +251,75 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) Array(hookedCode) } engineExecutionContext.setTotalParagraph(codes.length) + + val retryEnable: Boolean = SUPPORT_PARTIAL_RETRY_FOR_FAILED_TASKS_ENABLED + codes.indices.foreach({ index => if (ExecutionNodeStatus.Cancelled == engineConnTask.getStatus) { return ErrorExecuteResponse("Job is killed by user!", null) } - val code = codes(index) - engineExecutionContext.setCurrentParagraph(index + 1) - response = Utils.tryCatch(if (incomplete.nonEmpty) { - executeCompletely(engineExecutionContext, code, incomplete.toString()) - } else executeLine(engineExecutionContext, code)) { t => - ErrorExecuteResponse(ExceptionUtils.getRootCauseMessage(t), t) + var executeFlag = true + val errorIndex: Int = Integer.valueOf( + engineConnTask.getProperties.getOrDefault("execute.error.code.index", "-1").toString + ) + engineExecutionContext.getProperties.put("execute.error.code.index", errorIndex.toString) + // 重试的时候如果执行过则跳过执行 + if (retryEnable && errorIndex > 0 && index < errorIndex) { + engineExecutionContext.appendStdout( + LogUtils.generateInfo( + s"aisql retry with errorIndex: ${errorIndex}, current sql index: ${index} will skip." 
+ ) + ) + executeFlag = false } - // info(s"Finished to execute task ${engineConnTask.getTaskId}") - incomplete ++= code - response match { - case e: ErrorExecuteResponse => - failedTasks.increase() - logger.error("execute code failed!", e.t) - return response - case SuccessExecuteResponse() => - engineExecutionContext.appendStdout("\n") - incomplete.setLength(0) - case e: OutputExecuteResponse => - incomplete.setLength(0) - val output = - if (StringUtils.isNotEmpty(e.getOutput) && e.getOutput.length > outputPrintLimit) { - e.getOutput.substring(0, outputPrintLimit) - } else e.getOutput - engineExecutionContext.appendStdout(output) - if (StringUtils.isNotBlank(e.getOutput)) engineExecutionContext.sendResultSet(e) - case _: IncompleteExecuteResponse => - incomplete ++= incompleteSplitter + if (executeFlag) { + val code = codes(index) + engineExecutionContext.setCurrentParagraph(index + 1) + response = Utils.tryCatch(if (incomplete.nonEmpty) { + executeCompletely(engineExecutionContext, code, incomplete.toString()) + } else executeLine(engineExecutionContext, code)) { t => + ErrorExecuteResponse(ExceptionUtils.getRootCauseMessage(t), t) + } + // info(s"Finished to execute task ${engineConnTask.getTaskId}") + incomplete ++= code + response match { + case e: ErrorExecuteResponse => + val props: util.Map[String, String] = engineCreationContext.getOptions + val aiSqlEnable: String = props.getOrDefault("linkis.ai.sql.enable", "false").toString + val retryNum: Int = + Integer.valueOf(props.getOrDefault("linkis.ai.retry.num", "0").toString) + + if (retryEnable && !props.isEmpty && "true".equals(aiSqlEnable) && retryNum > 0) { + logger.info( + s"aisql execute failed, with index: ${index} retryNum: ${retryNum}, and will retry", + e.t + ) + engineExecutionContext.appendStdout( + LogUtils.generateInfo( + s"aisql execute failed, with index: ${index} retryNum: ${retryNum}, and will retry" + ) + ) + engineConnTask.getProperties.put("execute.error.code.index", index.toString) + 
return ErrorRetryExecuteResponse(e.message, index, e.t) + } else { + failedTasks.increase() + logger.error("execute code failed!", e.t) + return response + } + case SuccessExecuteResponse() => + engineExecutionContext.appendStdout("\n") + incomplete.setLength(0) + case e: OutputExecuteResponse => + incomplete.setLength(0) + val output = + if (StringUtils.isNotEmpty(e.getOutput) && e.getOutput.length > outputPrintLimit) { + e.getOutput.substring(0, outputPrintLimit) + } else e.getOutput + engineExecutionContext.appendStdout(output) + if (StringUtils.isNotBlank(e.getOutput)) engineExecutionContext.sendResultSet(e) + case _: IncompleteExecuteResponse => + incomplete ++= incompleteSplitter + } } }) Utils.tryCatch(engineExecutionContext.close()) { t => @@ -242,12 +334,15 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) response = response match { case _: OutputExecuteResponse => succeedTasks.increase() - transformTaskStatus(engineConnTask, ExecutionNodeStatus.Succeed) SuccessExecuteResponse() case s: SuccessExecuteResponse => succeedTasks.increase() - transformTaskStatus(engineConnTask, ExecutionNodeStatus.Succeed) s + case incompleteExecuteResponse: IncompleteExecuteResponse => + ErrorExecuteResponse( + s"The task cannot be an incomplete response ${incompleteExecuteResponse.message}", + null + ) case _ => response } response @@ -257,20 +352,34 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) } } - def execute(engineConnTask: EngineConnTask): ExecuteResponse = { + def execute(engineConnTask: EngineConnTask): ExecuteResponse = Utils.tryFinally { + val jobId = JobUtils.getJobIdFromMap(engineConnTask.getProperties) + LoggerUtils.setJobIdMDC(jobId) logger.info(s"start to execute task ${engineConnTask.getTaskId}") updateLastActivityTime() beforeExecute(engineConnTask) taskCache.put(engineConnTask.getTaskId, engineConnTask) lastTask = engineConnTask val response = ensureOp { - toExecuteTask(engineConnTask) + val executeResponse = 
toExecuteTask(engineConnTask) + executeResponse match { + case successExecuteResponse: SuccessExecuteResponse => + transformTaskStatus(engineConnTask, ExecutionNodeStatus.Succeed) + case errorExecuteResponse: ErrorExecuteResponse => + listenerBusContext.getEngineConnSyncListenerBus.postToAll( + TaskResponseErrorEvent(engineConnTask.getTaskId, errorExecuteResponse.message) + ) + transformTaskStatus(engineConnTask, ExecutionNodeStatus.Failed) + case _ => logger.warn(s"task get response is $executeResponse") + } + Utils.tryAndWarn(afterExecute(engineConnTask, executeResponse)) + executeResponse } - - Utils.tryAndWarn(afterExecute(engineConnTask, response)) logger.info(s"Finished to execute task ${engineConnTask.getTaskId}") // lastTask = null response + } { + LoggerUtils.removeJobIdMDC() } def setCodeParser(codeParser: CodeParser): Unit = this.codeParser = Some(codeParser) @@ -326,6 +435,33 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) } } + /** + * job task log print task params info + * + * @param engineExecutorContext + * @return + * Unit + */ + + def printTaskParamsLog(engineExecutorContext: EngineExecutionContext): Unit = { + val sb = new StringBuilder + EngineConnObject.getEngineCreationContext.getOptions.asScala.foreach({ case (key, value) => + // skip log jobId because it corresponding jobid when the ec created + if ( + !ComputationExecutorConf.PRINT_TASK_PARAMS_SKIP_KEYS.getValue + .split(",") + .exists(_.equals(key)) + ) { + sb.append(s"${key}=${value}\n") + } + }) + + sb.append("\n") + engineExecutorContext.appendStdout( + LogUtils.generateInfo(s"Your job exec with configs:\n${sb.toString()}\n") + ) + } + def transformTaskStatus(task: EngineConnTask, newStatus: ExecutionNodeStatus): Unit = { val oriStatus = task.getStatus logger.info(s"task ${task.getTaskId} from status $oriStatus to new status $newStatus") diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala index a9447109d88..9584dd847cb 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala @@ -17,9 +17,15 @@ package org.apache.linkis.engineconn.computation.executor.execute +import org.apache.linkis.DataWorkCloudApplication.getApplicationContext +import org.apache.linkis.engineconn.acessible.executor.info.DefaultNodeHealthyInfoManager +import org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils.currentEngineIsUnHealthy +import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask +import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.ConcurrentExecutor -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel import org.apache.linkis.scheduler.executer.ExecuteResponse abstract class ConcurrentComputationExecutor(override val outputPrintLimit: Int = 1000) @@ -62,10 +68,47 @@ abstract class ConcurrentComputationExecutor(override val outputPrintLimit: Int override def afterExecute( engineConnTask: 
EngineConnTask, executeResponse: ExecuteResponse - ): Unit = {} + ): Unit = { + // execute once should try to shutdown + if (engineConnTask.getLables.exists(_.isInstanceOf[ExecuteOnceLabel])) { + if (!hasTaskRunning()) { + logger.warn( + s"engineConnTask(${engineConnTask.getTaskId}) is execute once, now to mark engine to Finished" + ) + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } + // unhealthy node should try to shutdown + if (!hasTaskRunning() && currentEngineIsUnHealthy) { + logger.info("no task running and ECNode is unHealthy, now to mark engine to Finished.") + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } override def hasTaskRunning(): Boolean = { getRunningTask > 0 } + override def transition(toStatus: NodeStatus): Unit = { + if (getRunningTask >= getConcurrentLimit && NodeStatus.Unlock == toStatus) { + logger.info( + s"running task($getRunningTask) > concurrent limit:$getConcurrentLimit, can not to mark EC to Unlock" + ) + return + } + super.transition(toStatus) + } + + override def getConcurrentLimit: Int = { + var maxTaskNum = ComputationExecutorConf.ENGINE_CONCURRENT_THREAD_NUM.getValue - 5 + if (maxTaskNum <= 0) { + logger.error( + s"max task num cannot ${maxTaskNum} < 0, should set linkis.engineconn.concurrent.thread.num > 6" + ) + maxTaskNum = 1 + } + logger.info(s"max task num $maxTaskNum") + maxTaskNum + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala index 08124f2225a..6594f36bbaf 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala @@ -21,14 +21,17 @@ import org.apache.linkis.common.io.{FsPath, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetWriter} import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.cs.client.utils.ContextServiceUtils +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration import org.apache.linkis.engineconn.acessible.executor.listener.event.{ TaskLogUpdateEvent, TaskProgressUpdateEvent, TaskResultCreateEvent, TaskResultSizeCreatedEvent } +import org.apache.linkis.engineconn.acessible.executor.log.LogHelper import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.cs.CSTableResultSetWriter +import org.apache.linkis.engineconn.core.EngineConnObject import org.apache.linkis.engineconn.executor.ExecutorExecutionContext import org.apache.linkis.engineconn.executor.entity.Executor import org.apache.linkis.engineconn.executor.listener.{ @@ -187,9 +190,31 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String def appendStdout(log: String): Unit = if (executor.isInternalExecute) { logger.info(log) } else { - val listenerBus = getEngineSyncListenerBus - // jobId.foreach(jId => listenerBus.post(TaskLogUpdateEvent(jId, log))) - getJobId.foreach(jId => listenerBus.postToAll(TaskLogUpdateEvent(jId, log))) + var taskLog = log + val limitLength = ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_LENGTH.getValue + val limitEnableObj = + properties.get(ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_ENABLED.key) + val limitEnable = + if (limitEnableObj == null) { + ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_ENABLED.getValue + } else { + limitEnableObj.toString.toBoolean 
+ } + if (limitEnable) { + if (log.length > limitLength) { + taskLog = s"${log.substring(0, limitLength)}..." + logger.info( + "The log is too long and will be intercepted,log limit length : {}", + limitLength + ) + } + } + if (!AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getValue) { + LogHelper.cacheLog(taskLog) + } else { + val listenerBus = getEngineSyncListenerBus + getJobId.foreach(jId => listenerBus.postToAll(TaskLogUpdateEvent(jId, taskLog))) + } } override def close(): Unit = { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala index d22bf3f8007..e5ccd2bfbe7 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala @@ -25,11 +25,6 @@ import org.apache.linkis.manager.common.entity.enumeration.NodeStatus class ComputationEngineConnHook extends CallbackEngineConnHook { - override protected def getNodeStatusOfStartSuccess( - engineCreationContext: EngineCreationContext, - engineConn: EngineConn - ): NodeStatus = NodeStatus.Unlock - override def afterEngineServerStartSuccess( engineCreationContext: EngineCreationContext, engineConn: EngineConn diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala index c23e28a3a38..f9164b2cf3d 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala @@ -19,7 +19,9 @@ package org.apache.linkis.engineconn.computation.executor.hook import org.apache.linkis.common.utils.{ClassUtils, Logging, Utils} import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext +import org.apache.linkis.scheduler.executer.ExecuteResponse import scala.collection.JavaConverters.asScalaSetConverter import scala.collection.mutable.ArrayBuffer @@ -36,6 +38,11 @@ trait ComputationExecutorHook { codeBeforeHook: String ): String = codeBeforeHook + def afterExecutorExecute( + engineConnTask: EngineConnTask, + executeResponse: ExecuteResponse + ): Unit = {} + } object ComputationExecutorHook extends Logging { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala new file mode 100644 index 00000000000..454253748e0 --- /dev/null +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala @@ -0,0 +1,275 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconn.computation.executor.hook + +import org.apache.linkis.common.conf.Configuration.IS_VIEW_FS_ENV +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.common.conf.EngineConnConf +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.common.engineconn.EngineConn +import org.apache.linkis.engineconn.common.hook.EngineConnHook +import org.apache.linkis.engineconn.computation.executor.execute.{ + ComputationExecutor, + EngineExecutionContext +} +import org.apache.linkis.engineconn.core.engineconn.EngineConnManager +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.hadoop.common.conf.HadoopConf +import org.apache.linkis.hadoop.common.utils.HDFSUtils +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.{CodeLanguageLabel, RunType} +import org.apache.linkis.manager.label.entity.engine.RunType.RunType +import org.apache.linkis.rpc.Sender +import org.apache.linkis.udf.UDFClientConfiguration +import org.apache.linkis.udf.api.rpc.{RequestPythonModuleProtocol, ResponsePythonModuleProtocol} +import org.apache.linkis.udf.entity.PythonModuleInfoVO + +import org.apache.commons.lang3.StringUtils +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.fs.{FileSystem, Path} + +import java.util +import java.util.{Collections, Comparator} + +import scala.collection.JavaConverters._ +import scala.collection.mutable + +abstract class PythonModuleLoad extends Logging { + + /** Abstract properties to be defined by the subclass */ + protected val engineType: String + protected val runType: RunType + + protected def getEngineType(): String = engineType + + protected def constructCode(pythonModuleInfo: PythonModuleInfoVO): String + + private def queryPythonModuleRpc( + userName: String, + engineType: String + ): java.util.List[PythonModuleInfoVO] 
= { + val infoList = Sender + .getSender(UDFClientConfiguration.UDF_SERVICE_NAME.getValue) + .ask(RequestPythonModuleProtocol(userName, engineType)) + .asInstanceOf[ResponsePythonModuleProtocol] + .getModulesInfo() + + // 使用Collections.sort()和Comparator进行排序 + if (infoList != null && !infoList.isEmpty) { + Collections.sort( + infoList, + new Comparator[PythonModuleInfoVO]() { + override def compare(o1: PythonModuleInfoVO, o2: PythonModuleInfoVO): Int = + Integer.compare(o1.getId.toInt, o1.getId.toInt) + } + ) + } + infoList + } + + protected def getLoadPythonModuleCode: Array[String] = { + val engineCreationContext = + EngineConnManager.getEngineConnManager.getEngineConn.getEngineCreationContext + val user = engineCreationContext.getUser + + var infoList: util.List[PythonModuleInfoVO] = + Utils.tryAndWarn(queryPythonModuleRpc(user, getEngineType())) + if (infoList == null) { + logger.info("rpc get info is empty.") + infoList = new util.ArrayList[PythonModuleInfoVO]() + } + + // 替换Viewfs + if (IS_VIEW_FS_ENV.getValue) { + infoList.asScala.foreach { info => + val path = info.getPath + logger.info(s"python path: ${path}") + if (path.startsWith("hdfs") || path.startsWith("viewfs")) { + info.setPath(path.replace("hdfs://", "viewfs://")) + } else { + info.setPath("viewfs://" + path) + } + } + } else { + + infoList.asScala.foreach { info => + val path = info.getPath + logger.info(s"hdfs python path: ${path}") + if (!path.startsWith("hdfs")) { + info.setPath("hdfs://" + path) + } + } + } + + logger.info(s"${user} load python modules: ") + infoList.asScala.foreach(l => logger.info(s"module name:${l.getName}, path:${l.getPath}\n")) + + // 创建加载code + val codes: mutable.Buffer[String] = infoList.asScala + .filter { info => StringUtils.isNotEmpty(info.getPath) } + .map(constructCode) + // 打印codes + val str: String = codes.mkString("\n") + logger.info(s"python codes: $str") + codes.toArray + } + + private def executeFunctionCode(codes: Array[String], executor: 
ComputationExecutor): Unit = { + if (null == codes || null == executor) { + return + } + codes.foreach { code => + logger.info("Submit function registration to engine, code: " + code) + Utils.tryCatch(executor.executeLine(new EngineExecutionContext(executor), code)) { + t: Throwable => + logger.error("Failed to load python module", t) + null + } + } + } + + /** + * Generate and execute the code necessary for loading Python modules. + * + * @param executor + * An object capable of executing code in the current engine context. + */ + protected def loadPythonModules(labels: Array[Label[_]]): Unit = { + + val codes = getLoadPythonModuleCode + logger.info(s"codes length: ${codes.length}") + if (null != codes && codes.nonEmpty) { + val executor = ExecutorManager.getInstance.getExecutorByLabels(labels) + if (executor != null) { + val className = executor.getClass.getName + logger.info(s"executor class: ${className}") + } else { + logger.error(s"Failed to load python, executor is null") + } + + executor match { + case computationExecutor: ComputationExecutor => + executeFunctionCode(codes, computationExecutor) + case _ => + } + } + logger.info(s"Successful to load python, engineType : ${engineType}") + } + +} + +abstract class PythonModuleLoadEngineConnHook + extends PythonModuleLoad + with EngineConnHook + with Logging { + + override def afterExecutionExecute( + engineCreationContext: EngineCreationContext, + engineConn: EngineConn + ): Unit = { + Utils.tryAndWarnMsg { + val codeLanguageLabel = new CodeLanguageLabel + codeLanguageLabel.setCodeType(runType.toString) + logger.info(s"engineType: ${engineType}") + val labels = Array[Label[_]](codeLanguageLabel) + loadPythonModules(labels) + }(s"Failed to load Python Modules: ${engineType}") + + } + + override def afterEngineServerStartFailed( + engineCreationContext: EngineCreationContext, + throwable: Throwable + ): Unit = { + logger.error(s"Failed to start Engine Server: ${throwable.getMessage}", throwable) + } + + override 
def beforeCreateEngineConn(engineCreationContext: EngineCreationContext): Unit = { + logger.info("Preparing to load Python Module...") + } + + override def beforeExecutionExecute( + engineCreationContext: EngineCreationContext, + engineConn: EngineConn + ): Unit = { + logger.info(s"Before executing command on load Python Module.") + } + +} + +// 加载PySpark的Python模块 +class PythonSparkEngineHook extends PythonModuleLoadEngineConnHook { + + // 设置engineType属性为"spark",表示此挂钩适用于Spark数据处理引擎 + override val engineType: String = "spark" + + // 设置runType属性为RunType.PYSPARK,表示此挂钩将执行PySpark类型的代码 + override protected val runType: RunType = RunType.PYSPARK + + // 重写constructCode方法,用于根据Python模块信息构造加载模块的代码 + override protected def constructCode(pythonModuleInfo: PythonModuleInfoVO): String = { + // 使用pythonModuleInfo的path属性,构造SparkContext.addPyFile的命令字符串 + // 这个命令在PySpark环境中将模块文件添加到所有worker上,以便在代码中可以使用 + val path: String = pythonModuleInfo.getPath + val loadCode = s"sc.addPyFile('${path}')" + logger.info(s"pythonLoadCode: ${loadCode}") + loadCode + } + +} + +// 加载Python的Python模块 +class PythonEngineHook extends PythonModuleLoadEngineConnHook { + + // 设置engineType属性为"python",表示此挂钩适用于python引擎 + override val engineType: String = "python" + + // 设置runType属性为RunType.PYTHON,表示此挂钩将执行python类型的代码 + override protected val runType: RunType = RunType.PYTHON + + // 重写constructCode方法,用于根据Python模块信息构造加载模块的代码 + override protected def constructCode(pythonModuleInfo: PythonModuleInfoVO): String = { + // 处理文件 + val path: String = pythonModuleInfo.getPath + val engineCreationContext: EngineCreationContext = + EngineConnManager.getEngineConnManager.getEngineConn.getEngineCreationContext + val user: String = engineCreationContext.getUser + + var loadCode: String = null + logger.info(s"gen code in constructCode") + Utils.tryAndWarn({ + // 获取引擎临时目录 + var tmpDir: String = EngineConnConf.getEngineTmpDir + if (!tmpDir.endsWith("/")) { + tmpDir += "/" + } + val fileName: String = new java.io.File(path).getName + 
val destPath: String = tmpDir + fileName + val config: Configuration = HDFSUtils.getConfiguration(HadoopConf.HADOOP_ROOT_USER.getValue) + val fs: FileSystem = HDFSUtils.getHDFSUserFileSystem(user, null, config) + fs.copyToLocalFile(new Path(path), new Path("file://" + destPath)) + if (fileName.endsWith("zip")) { + tmpDir += fileName + } + loadCode = s"import sys; sys.path.append('${tmpDir}')" + logger.info(s"5 load local python code: ${loadCode} in path: $destPath") + }) + loadCode + } + +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala index 91af2811a6f..594db0afe7c 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala @@ -103,7 +103,13 @@ abstract class UDFLoad extends Logging { } logger.info("all udfs: ") + // 判断是否加载了特殊udf + val udfNames: String = ComputationExecutorConf.SPECIAL_UDF_NAMES.getValue udfInfos.foreach { l => + if (StringUtils.isNotBlank(udfNames) && udfNames.split(",").exists(l.getUdfName.contains)) { + logger.info(s"add spacial udf check for job with udfNames: {}", udfNames) + System.getProperties.put(ComputationExecutorConf.ONLY_SQL_USE_UDF_KEY, udfNames) + } logger.info( s"udfName:${l.getUdfName}, bml_resource_id:${l.getBmlResourceId}, bml_id:${l.getId}\n" ) diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala index 77616944d25..9469c006858 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala @@ -28,8 +28,11 @@ import org.apache.linkis.engineconn.computation.executor.execute.EngineExecution import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext +import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel +import java.util + class ExecuteOnceHook extends ComputationExecutorHook with ExecutorLockListener with Logging { private var executeOnce = false @@ -47,6 +50,11 @@ class ExecuteOnceHook extends ComputationExecutorHook with ExecutorLockListener codeBeforeHook: String ): String = { executeOnce = engineExecutionContext.getLabels.exists(_.isInstanceOf[ExecuteOnceLabel]) + val creationLabelList: util.List[Label[_]] = engineCreationContext.getLabels() + if (creationLabelList != null) { + executeOnce = + executeOnce || creationLabelList.toArray().exists(_.isInstanceOf[ExecuteOnceLabel]) + } if (executeOnce && !isRegister) { isRegister = true asyncListenerBusContext.addListener(this) diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala index f96896f5577..4446bdc672f 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala @@ -91,4 +91,12 @@ object ComputationEngineConnMetrics { getTotalBusyTimeMills(nodeStatus) + getTotalIdleTimeMills(nodeStatus) def getUnlockToShutdownDurationMills(): Long = unlockToShutdownDurationMills.get() + + def getLastUnlockTimestamp(nodeStatus: NodeStatus): Long = { + nodeStatus match { + case NodeStatus.Unlock => lastUnlockTimeMills + case _ => 0 + } + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala index 1f8c491ced4..55b88f520b6 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala @@ -19,7 +19,8 @@ package org.apache.linkis.engineconn.computation.executor.rs import 
org.apache.linkis.common.io.{MetaData, Record} import org.apache.linkis.common.io.resultset.ResultSetWriter -import org.apache.linkis.common.utils.Logging +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext import org.apache.linkis.storage.LineRecord @@ -45,6 +46,9 @@ class RsOutputStream extends OutputStream with Logging { } def reset(engineExecutionContext: EngineExecutionContext): Unit = { + if (ComputationExecutorConf.CLOSE_RS_OUTPUT_WHEN_RESET_BY_DEFAULT_ENABLED) { + Utils.tryQuietly(close()) + } writer = engineExecutionContext.createDefaultResultSetWriter() writer.addMetaData(null) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala index eccf54bfad5..e5d74282de3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala @@ -21,10 +21,13 @@ import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.engineconn.acessible.executor.info.NodeHeartbeatMsgManager import org.apache.linkis.engineconn.computation.executor.metrics.ComputationEngineConnMetrics import org.apache.linkis.engineconn.core.EngineConnObject -import org.apache.linkis.engineconn.executor.entity.{Executor, SensibleExecutor} +import 
org.apache.linkis.engineconn.executor.entity.{Executor, SensibleExecutor, YarnExecutor} import org.apache.linkis.governance.common.constant.ec.ECConstants +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.server.BDPJettyServerHelper +import org.apache.commons.lang3.StringUtils + import org.springframework.stereotype.Component import java.util @@ -63,6 +66,10 @@ class DefaultNodeHeartbeatMsgManager extends NodeHeartbeatMsgManager with Loggin ECConstants.EC_TOTAL_LOCK_TIME_MILLS_KEY, ComputationEngineConnMetrics.getTotalLockTimeMills(status).asInstanceOf[Object] ) + msgMap.put( + ECConstants.EC_LAST_UNLOCK_TIMESTAMP, + ComputationEngineConnMetrics.getLastUnlockTimestamp(status).asInstanceOf[Object] + ) case _ => } val engineParams = EngineConnObject.getEngineCreationContext.getOptions @@ -72,6 +79,22 @@ class DefaultNodeHeartbeatMsgManager extends NodeHeartbeatMsgManager with Loggin engineParams.get(ECConstants.YARN_QUEUE_NAME_CONFIG_KEY).asInstanceOf[Object] ) } + executor match { + case yarnExecutor: YarnExecutor => + if (StringUtils.isNotBlank(yarnExecutor.getQueue)) { + msgMap.put(ECConstants.YARN_QUEUE_NAME_KEY, yarnExecutor.getQueue) + } + if (StringUtils.isNotBlank(yarnExecutor.getApplicationId)) { + msgMap.put(ECConstants.YARN_APPID_NAME_KEY, yarnExecutor.getApplicationId) + } + if (StringUtils.isNotBlank(yarnExecutor.getApplicationURL)) { + msgMap.put(ECConstants.YARN_APP_URL_KEY, yarnExecutor.getApplicationURL) + } + if (StringUtils.isNotBlank(yarnExecutor.getYarnMode)) { + msgMap.put(ECConstants.YARN_MODE_KEY, yarnExecutor.getYarnMode) + } + case _ => + } Utils.tryCatch(BDPJettyServerHelper.gson.toJson(msgMap)) { case e: Exception => val msgs = msgMap.asScala .map { case (k, v) => if (null == v) s"${k}->null" else s"${k}->${v.toString}" } diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala index 651fc0f3dca..3739f47b541 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala @@ -18,7 +18,7 @@ package org.apache.linkis.engineconn.computation.executor.service import org.apache.linkis.common.listener.Event -import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.common.utils.{CodeAndRunTypeUtils, Logging, Utils} import org.apache.linkis.engineconn.acessible.executor.listener.LogListener import org.apache.linkis.engineconn.acessible.executor.listener.event._ import org.apache.linkis.engineconn.acessible.executor.log.LogHelper @@ -34,7 +34,6 @@ import org.apache.linkis.engineconn.computation.executor.execute.{ ComputationExecutor, ConcurrentComputationExecutor } -import org.apache.linkis.engineconn.computation.executor.hook.ExecutorLabelsRestHook import org.apache.linkis.engineconn.computation.executor.listener.{ ResultSetListener, TaskProgressListener, @@ -50,7 +49,6 @@ import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.ResourceFetchExecutor import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.engineconn.executor.listener.event.EngineConnSyncEvent -import org.apache.linkis.engineconn.launch.EngineConnServer import 
org.apache.linkis.governance.common.constant.ec.ECConstants import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.governance.common.exception.engineconn.{ @@ -58,14 +56,14 @@ import org.apache.linkis.governance.common.exception.engineconn.{ EngineConnExecutorErrorException } import org.apache.linkis.governance.common.protocol.task._ -import org.apache.linkis.governance.common.utils.JobUtils -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} +import org.apache.linkis.hadoop.common.utils.KerberosUtils import org.apache.linkis.manager.common.protocol.resource.{ ResponseTaskRunningInfo, ResponseTaskYarnResource } -import org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.protocol.constants.TaskConstant import org.apache.linkis.protocol.message.RequestProtocol import org.apache.linkis.rpc.Sender @@ -73,6 +71,7 @@ import org.apache.linkis.rpc.message.annotation.Receiver import org.apache.linkis.rpc.utils.RPCUtils import org.apache.linkis.scheduler.executer.{ ErrorExecuteResponse, + ErrorRetryExecuteResponse, ExecuteResponse, IncompleteExecuteResponse, SubmitResponse @@ -88,6 +87,7 @@ import org.springframework.stereotype.Component import javax.annotation.PostConstruct import java.util +import java.util.Map import java.util.concurrent._ import java.util.concurrent.atomic.AtomicInteger @@ -108,19 +108,12 @@ class TaskExecutionServiceImpl private lazy val executorManager = ExecutorManager.getInstance private val taskExecutedNum = new AtomicInteger(0) private var lastTask: EngineConnTask = _ - private var lastTaskFuture: Future[_] = _ + private var syncLastTaskThread: Thread = _ private var lastTaskDaemonFuture: Future[_] = _ - // for concurrent executor - private var consumerThread: 
Thread = _ - private var concurrentTaskQueue: BlockingQueue[EngineConnTask] = _ - @Autowired private var lockService: LockService = _ - private val asyncListenerBusContext = - ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnAsyncListenerBus - private val syncListenerBus = ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnSyncListenerBus @@ -135,18 +128,19 @@ class TaskExecutionServiceImpl "ConcurrentEngineConnThreadPool" ) - private val CONCURRENT_TASK_LOCKER = new Object - private val taskAsyncSubmitExecutor: ExecutionContextExecutorService = Utils.newCachedExecutionContext( ComputationExecutorConf.TASK_ASYNC_MAX_THREAD_SIZE, - "TaskExecution-Thread-" + ComputationEngineConstant.TASK_EXECUTION_THREAD ) @PostConstruct def init(): Unit = { LogHelper.setLogListener(this) syncListenerBus.addListener(this) + if (ComputationExecutorConf.ENGINE_KERBEROS_AUTO_REFRESH_ENABLED) { + KerberosUtils.startKerberosRefreshThread() + } } private def sendToEntrance(task: EngineConnTask, msg: RequestProtocol): Unit = { @@ -169,83 +163,121 @@ class TaskExecutionServiceImpl } } + /** + * submit to async thread return submit response + * @param requestTask + * @param sender + * @return + */ @Receiver - override def execute(requestTask: RequestTask, sender: Sender): ExecuteResponse = { - - // check lock - logger.info("Received a new task, task content is " + requestTask) - if (StringUtils.isBlank(requestTask.getLock)) { - logger.error(s"Invalid lock : ${requestTask.getLock} , requestTask : " + requestTask) - return ErrorExecuteResponse( - s"Invalid lock : ${requestTask.getLock}.", - new EngineConnExecutorErrorException( - EngineConnExecutorErrorCode.INVALID_PARAMS, - "Invalid lock or code(请获取到锁后再提交任务.)" + override def execute(requestTask: RequestTask, sender: Sender): ExecuteResponse = + Utils.tryFinally { + val jobId = JobUtils.getJobIdFromMap(requestTask.getProperties) + LoggerUtils.setJobIdMDC(jobId) + // check lock + logger.info("Received 
a new task, task content is " + requestTask) + if (StringUtils.isBlank(requestTask.getLock)) { + logger.error(s"Invalid lock : ${requestTask.getLock} , requestTask : " + requestTask) + return ErrorExecuteResponse( + s"Invalid lock : ${requestTask.getLock}.", + new EngineConnExecutorErrorException( + EngineConnExecutorErrorCode.INVALID_PARAMS, + "Invalid lock or code(请获取到锁后再提交任务.)" + ) ) - ) - } - if (!lockService.isLockExist(requestTask.getLock)) { - logger.error(s"Lock ${requestTask.getLock} not exist, cannot execute.") - return ErrorExecuteResponse( - "Lock not exixt", - new EngineConnExecutorErrorException( - EngineConnExecutorErrorCode.INVALID_LOCK, - "Lock : " + requestTask.getLock + " not exist(您的锁无效,请重新获取后再提交)." + } + if (!lockService.isLockExist(requestTask.getLock)) { + logger.error(s"Lock ${requestTask.getLock} not exist, cannot execute.") + return ErrorExecuteResponse( + "Lock not exixt", + new EngineConnExecutorErrorException( + EngineConnExecutorErrorCode.INVALID_LOCK, + "Lock : " + requestTask.getLock + " not exist(您的锁无效,请重新获取后再提交)." 
+ ) ) - ) - } + } - if (StringUtils.isBlank(requestTask.getCode)) { - return IncompleteExecuteResponse( - "Your code is incomplete, it may be that only comments are selected for execution(您的代码不完整,可能是仅仅选中了注释进行执行)" - ) - } + if (StringUtils.isBlank(requestTask.getCode)) { + return IncompleteExecuteResponse( + "Your code is incomplete, it may be that only comments are selected for execution(您的代码不完整,可能是仅仅选中了注释进行执行)" + ) + } - val taskId: Int = taskExecutedNum.incrementAndGet() - val retryAble: Boolean = { - val retry = - requestTask.getProperties.getOrDefault(ComputationEngineConstant.RETRYABLE_TYPE_NAME, null) - if (null != retry) retry.asInstanceOf[Boolean] - else false - } - val jobId = JobUtils.getJobIdFromMap(requestTask.getProperties) - if (StringUtils.isNotBlank(jobId)) { - System.getProperties.put(ComputationExecutorConf.JOB_ID_TO_ENV_KEY, jobId) - logger.info(s"Received job with id ${jobId}.") - } - val task = new CommonEngineConnTask(String.valueOf(taskId), retryAble) - task.setCode(requestTask.getCode) - task.setProperties(requestTask.getProperties) - task.data(ComputationEngineConstant.LOCK_TYPE_NAME, requestTask.getLock) - task.setStatus(ExecutionNodeStatus.Scheduled) - val labels = requestTask.getLabels.asScala.toArray - task.setLabels(labels) - val entranceServerInstance = RPCUtils.getServiceInstanceFromSender(sender) - task.setCallbackServiceInstance(entranceServerInstance) - logger.info(s"task $taskId submit executor to execute") - val runnable = new Runnable { - override def run(): Unit = Utils.tryCatch { - // Waiting to run, preventing task messages from being sent to submit services before SubmitResponse, such as entry - Thread.sleep(ComputationExecutorConf.TASK_SUBMIT_WAIT_TIME_MS) - submitTaskToExecutor(task, labels) match { - case ErrorExecuteResponse(message, throwable) => - sendToEntrance(task, ResponseTaskError(task.getTaskId, message)) - logger.error(message, throwable) - sendToEntrance(task, ResponseTaskStatus(task.getTaskId, 
ExecutionNodeStatus.Failed)) - case _ => + val taskId: String = if (StringUtils.isNotBlank(jobId)) { + jobId + } else { + String.valueOf(taskExecutedNum.incrementAndGet()) + } + val retryAble: Boolean = { + val retry = + requestTask.getProperties.getOrDefault( + ComputationEngineConstant.RETRYABLE_TYPE_NAME, + null + ) + if (null != retry) retry.asInstanceOf[Boolean] + else false + } + + if (StringUtils.isNotBlank(jobId)) { + System.getProperties.put(ComputationExecutorConf.JOB_ID_TO_ENV_KEY, jobId) + logger.info(s"Received job with id ${jobId}.") + } + + // only sql can use udf check, udfName set in UDFLoad + val codeType: String = LabelUtil.getCodeType(requestTask.getLabels) + val languageType: String = CodeAndRunTypeUtils.getLanguageTypeByCodeType(codeType) + System.getProperties.put(ComputationExecutorConf.CODE_TYPE, languageType) + logger.info(s"add spacial udf check for job ${jobId} with codeType: {}", languageType) + + val task = new CommonEngineConnTask(taskId, retryAble) + task.setCode(requestTask.getCode) + task.setProperties(requestTask.getProperties) + task.data(ComputationEngineConstant.LOCK_TYPE_NAME, requestTask.getLock) + task.setStatus(ExecutionNodeStatus.Scheduled) + val labels = requestTask.getLabels.asScala.toArray + task.setLabels(labels) + val entranceServerInstance = RPCUtils.getServiceInstanceFromSender(sender) + task.setCallbackServiceInstance(entranceServerInstance) + logger.info(s"task $taskId submit executor to execute") + val runnable = new Runnable { + override def run(): Unit = Utils.tryCatch { + // Waiting to run, preventing task messages from being sent to submit services before SubmitResponse, such as entry + Thread.sleep(ComputationExecutorConf.TASK_SUBMIT_WAIT_TIME_MS) + LoggerUtils.setJobIdMDC(jobId) + submitTaskToExecutor(task, labels) match { + case ErrorExecuteResponse(message, throwable) => + sendToEntrance(task, ResponseTaskError(task.getTaskId, message)) + logger.error(message, throwable) + sendToEntrance(task, 
ResponseTaskStatus(task.getTaskId, ExecutionNodeStatus.Failed)) + case ErrorRetryExecuteResponse(message, index, throwable) => + sendToEntrance(task, ResponseTaskError(task.getTaskId, message)) + logger.error(message, throwable) + sendToEntrance( + task, + new ResponseTaskStatusWithExecuteCodeIndex( + task.getTaskId, + ExecutionNodeStatus.Failed, + index + ) + ) + case _ => + } + LoggerUtils.removeJobIdMDC() + } { t => + logger.warn("Failed to submit task ", t) + LoggerUtils.removeJobIdMDC() + sendToEntrance( + task, + ResponseTaskError(task.getTaskId, ExceptionUtils.getRootCauseMessage(t)) + ) + sendToEntrance(task, ResponseTaskStatus(task.getTaskId, ExecutionNodeStatus.Failed)) } - } { t => - logger.warn("Failed to submit task ", t) - sendToEntrance( - task, - ResponseTaskError(task.getTaskId, ExceptionUtils.getRootCauseMessage(t)) - ) - sendToEntrance(task, ResponseTaskStatus(task.getTaskId, ExecutionNodeStatus.Failed)) } + val submitTaskToExecutorFuture = taskAsyncSubmitExecutor.submit(runnable) + SubmitResponse(task.getTaskId) + } { + LoggerUtils.removeJobIdMDC() } - val submitTaskToExecutorFuture = taskAsyncSubmitExecutor.submit(runnable) - SubmitResponse(task.getTaskId) - } private def submitTaskToExecutor( task: CommonEngineConnTask, @@ -256,11 +288,11 @@ class TaskExecutionServiceImpl case computationExecutor: ComputationExecutor => taskIdCache.put(task.getTaskId, computationExecutor) submitTask(task, computationExecutor) - case o => + case _ => val labelsStr = if (labels != null) labels.filter(_ != null).map(_.getStringValue).mkString(",") else "" val msg = - "Invalid computationExecutor : " + o.getClass.getName + ", labels : " + labelsStr + ", requestTask : " + task.getTaskId + "Invalid computationExecutor : " + executor.getClass.getName + ", labels : " + labelsStr + ", requestTask : " + task.getTaskId logger.error(msg) ErrorExecuteResponse( "Invalid computationExecutor(生成无效的计算引擎,请联系管理员).", @@ -269,19 +301,6 @@ class TaskExecutionServiceImpl } } - 
private def restExecutorLabels(labels: Array[Label[_]]): Array[Label[_]] = { - var newLabels = labels - ExecutorLabelsRestHook.getExecutorLabelsRestHooks.foreach(hooke => - newLabels = hooke.restExecutorLabels(newLabels) - ) - newLabels - } - - // override def taskStatus(taskID: String): ResponseTaskStatus = { - // val task = taskIdCache.get(taskID) - // ResponseTaskStatus(taskID, task.getStatus.id) - // } - private def submitTask( task: CommonEngineConnTask, computationExecutor: ComputationExecutor @@ -301,105 +320,60 @@ class TaskExecutionServiceImpl task: CommonEngineConnTask, computationExecutor: ComputationExecutor ): ExecuteResponse = { - var response: ExecuteResponse = SubmitResponse(task.getTaskId) - Utils.tryCatch { - computationExecutor.execute(task) - } { t => - logger.error(s"Failed to submit task${task.getTaskId} ", t) - response = ErrorExecuteResponse("Failed to submit task", t) - null - } - response + computationExecutor.execute(task) } private def submitSyncTask( task: CommonEngineConnTask, computationExecutor: ComputationExecutor ): ExecuteResponse = { - val runTask = new Runnable { - override def run(): Unit = Utils.tryAndWarn { - LogHelper.dropAllRemainLogs() - executeTask(task, computationExecutor) - } - } + LogHelper.dropAllRemainLogs() lastTask = task - lastTaskFuture = Utils.defaultScheduler.submit(runTask) - lastTaskDaemonFuture = openDaemonForTask(task, lastTaskFuture, Utils.defaultScheduler) - SubmitResponse(task.getTaskId) + syncLastTaskThread = Thread.currentThread() + lastTaskDaemonFuture = openDaemonForTask(task, Utils.defaultScheduler) + val res = executeTask(task, computationExecutor) + res } private def submitConcurrentTask( task: CommonEngineConnTask, executor: ConcurrentComputationExecutor ): ExecuteResponse = { - if (null == concurrentTaskQueue) CONCURRENT_TASK_LOCKER.synchronized { - if (null == concurrentTaskQueue) { - concurrentTaskQueue = new LinkedBlockingDeque[EngineConnTask]() - } - } - concurrentTaskQueue.put(task) - if 
(null == consumerThread) CONCURRENT_TASK_LOCKER.synchronized { - if (null == consumerThread) { - consumerThread = new Thread(createConsumerRunnable(executor)) - consumerThread.setDaemon(true) - consumerThread.setName("ConcurrentTaskQueueFifoConsumerThread") - consumerThread.start() - } - } - SubmitResponse(task.getTaskId) - } - - private def createConsumerRunnable(executor: ComputationExecutor): Thread = { - val consumerRunnable = new Runnable { + val concurrentJob = new Runnable { override def run(): Unit = { - var errCount = 0 - val ERR_COUNT_MAX = 20 - while (true) { - Utils.tryCatch { - if (!executor.isBusy && !executor.isClosed) { - val task = concurrentTaskQueue.take() - val concurrentJob = new Runnable { - override def run(): Unit = { - lastTask = task - Utils.tryCatch { - logger.info(s"Start to run task ${task.getTaskId}") - executeTask(task, executor) - } { case t: Throwable => - errCount += 1 - logger.error(s"Execute task ${task.getTaskId} failed :", t) - if (errCount > ERR_COUNT_MAX) { - logger.error( - s"Executor run failed for ${errCount} times over ERROR_COUNT_MAX : ${ERR_COUNT_MAX}, will shutdown." 
- ) - executor.transition(NodeStatus.ShuttingDown) - } - } - } - } - cachedThreadPool.submit(concurrentJob) - } - Thread.sleep(20) - } { case t: Throwable => - logger.error(s"consumerThread failed :", t) - } + Utils.tryCatch { + val jobId = JobUtils.getJobIdFromMap(task.getProperties) + LoggerUtils.setJobIdMDC(jobId) + logger.info(s"Start to run task ${task.getTaskId}") + executeTask(task, executor) + } { case t: Throwable => + logger.warn("Failed to execute task ", t) + sendToEntrance( + task, + ResponseTaskError(task.getTaskId, ExceptionUtils.getRootCauseMessage(t)) + ) + sendToEntrance(task, ResponseTaskStatus(task.getTaskId, ExecutionNodeStatus.Failed)) + LoggerUtils.removeJobIdMDC() + null } } } - new Thread(consumerRunnable) + Utils.tryCatch(cachedThreadPool.submit(concurrentJob)) { case e: Exception => + logger.error(s"Failed to submit task ${task.getTaskId}", e) + throw e + } + SubmitResponse(task.getTaskId) } - private def executeTask(task: EngineConnTask, executor: ComputationExecutor): Unit = { - val response = executor.execute(task) - response match { - case ErrorExecuteResponse(message, throwable) => - sendToEntrance(task, ResponseTaskError(task.getTaskId, message)) - logger.error(message, throwable) - LogHelper.pushAllRemainLogs() - executor.transformTaskStatus(task, ExecutionNodeStatus.Failed) - case _ => logger.warn(s"task get response is $response") + private def executeTask(task: EngineConnTask, executor: ComputationExecutor): ExecuteResponse = + Utils.tryFinally { + val jobId = JobUtils.getJobIdFromMap(task.getProperties) + LoggerUtils.setJobIdMDC(jobId) + executor.execute(task) + } { + clearCache(task.getTaskId) + LoggerUtils.removeJobIdMDC() } - clearCache(task.getTaskId) - } /** * Open daemon thread @@ -410,79 +384,98 @@ class TaskExecutionServiceImpl * scheduler * @return */ - private def openDaemonForTask( - task: EngineConnTask, - taskFuture: Future[_], - scheduler: ExecutorService - ): Future[_] = { + private def openDaemonForTask(task: 
EngineConnTask, scheduler: ExecutorService): Future[_] = { val sleepInterval = ComputationExecutorConf.ENGINE_PROGRESS_FETCH_INTERVAL.getValue scheduler.submit(new Runnable { - override def run(): Unit = Utils.tryAndWarn { + override def run(): Unit = { + logger.info( + s"start progress daemon thread for task ${task.getTaskId}, status ${task.getStatus}" + ) Utils.tryQuietly(Thread.sleep(TimeUnit.MILLISECONDS.convert(1, TimeUnit.SECONDS))) - while (null != taskFuture && !taskFuture.isDone) { - if ( - ExecutionNodeStatus.isCompleted(task.getStatus) || ExecutionNodeStatus - .isRunning(task.getStatus) - ) { - val progressResponse = taskProgress(task.getTaskId) - val resourceResponse: ResponseTaskYarnResource = - taskYarnResource(task.getTaskId) match { - case responseTaskYarnResource: ResponseTaskYarnResource => - if ( - responseTaskYarnResource.resourceMap != null && !responseTaskYarnResource.resourceMap.isEmpty - ) { - responseTaskYarnResource - } else { - null - } - case _ => - null - } - val extraInfoMap = new util.HashMap[String, Object]() - extraInfoMap.put(TaskConstant.ENGINE_INSTANCE, Sender.getThisInstance) - extraInfoMap.put( - ECConstants.EC_TICKET_ID_KEY, - EngineConnObject.getEngineCreationContext.getTicketId - ) - val ecParams = EngineConnObject.getEngineCreationContext.getOptions - if (ecParams.containsKey(ECConstants.YARN_QUEUE_NAME_CONFIG_KEY)) { - extraInfoMap.put( - ECConstants.YARN_QUEUE_NAME_KEY, - ecParams.get(ECConstants.YARN_QUEUE_NAME_CONFIG_KEY) - ) + while (!ExecutionNodeStatus.isCompleted(task.getStatus)) { + Utils.tryAndWarn { + val progressResponse = Utils.tryCatch(taskProgress(task.getTaskId)) { + case e: Exception => + logger.info("Failed to get progress", e) + null } - extraInfoMap.put(TaskConstant.ENGINE_CONN_TASK_ID, task.getTaskId) - extraInfoMap.put( - TaskConstant.ENGINE_CONN_SUBMIT_TIME, - System.currentTimeMillis.toString - ) - // todo add other info - var respRunningInfo: ResponseTaskRunningInfo = null - if (null != 
resourceResponse) { - respRunningInfo = ResponseTaskRunningInfo( - progressResponse.execId, - progressResponse.progress, - progressResponse.progressInfo, - resourceResponse.resourceMap, - extraInfoMap - ) + val resourceResponse = Utils.tryCatch(buildResourceMap(task)) { case e: Exception => + logger.info("Failed to get resource", e) + null + } + val extraInfoMap = Utils.tryCatch(buildExtraInfoMap(task)) { case e: Exception => + logger.info("Failed to get extra info ", e) + null + } + val resourceMap = if (null != resourceResponse) resourceResponse.resourceMap else null + + /** + * It is guaranteed that there must be progress the progress must be greater than or + * equal to 0.1 + */ + val newProgressResponse = if (null == progressResponse) { + ResponseTaskProgress(task.getTaskId, 0.1f, null) + } else if (progressResponse.progress < 0.1f) { + ResponseTaskProgress(task.getTaskId, 0.1f, progressResponse.progressInfo) } else { - respRunningInfo = ResponseTaskRunningInfo( - progressResponse.execId, - progressResponse.progress, - progressResponse.progressInfo, - null, - extraInfoMap - ) + progressResponse } + val respRunningInfo: ResponseTaskRunningInfo = ResponseTaskRunningInfo( + newProgressResponse.execId, + newProgressResponse.progress, + newProgressResponse.progressInfo, + resourceMap, + extraInfoMap + ) sendToEntrance(task, respRunningInfo) - Thread.sleep(TimeUnit.MILLISECONDS.convert(sleepInterval, TimeUnit.SECONDS)) } + Utils.tryQuietly( + Thread.sleep(TimeUnit.MILLISECONDS.convert(sleepInterval, TimeUnit.SECONDS)) + ) } + logger.info( + s"End progress daemon thread exit task ${task.getTaskId}, status ${task.getStatus}" + ) } }) } + private def buildExtraInfoMap(task: EngineConnTask): util.HashMap[String, Object] = { + val extraInfoMap = new util.HashMap[String, Object]() + extraInfoMap.put(TaskConstant.ENGINE_INSTANCE, Sender.getThisInstance) + extraInfoMap.put( + ECConstants.EC_TICKET_ID_KEY, + EngineConnObject.getEngineCreationContext.getTicketId + ) + val 
ecParams = EngineConnObject.getEngineCreationContext.getOptions + if (ecParams.containsKey(ECConstants.YARN_QUEUE_NAME_CONFIG_KEY)) { + extraInfoMap.put( + ECConstants.YARN_QUEUE_NAME_KEY, + ecParams.get(ECConstants.YARN_QUEUE_NAME_CONFIG_KEY) + ) + } + extraInfoMap.put(TaskConstant.ENGINE_CONN_TASK_ID, task.getTaskId) + extraInfoMap.put(TaskConstant.ENGINE_CONN_SUBMIT_TIME, System.currentTimeMillis.toString) + extraInfoMap + } + + private def buildResourceMap(task: EngineConnTask): ResponseTaskYarnResource = { + val resourceResponse: ResponseTaskYarnResource = + taskYarnResource(task.getTaskId) match { + case responseTaskYarnResource: ResponseTaskYarnResource => + if ( + responseTaskYarnResource.resourceMap != null && !responseTaskYarnResource.resourceMap.isEmpty + ) { + responseTaskYarnResource + } else { + null + } + case _ => + null + } + resourceResponse + } + private def taskYarnResource(taskID: String): ResponseTaskYarnResource = { val executor = taskIdCache.getIfPresent(taskID) executor match { @@ -494,7 +487,7 @@ class TaskExecutionServiceImpl } override def taskProgress(taskID: String): ResponseTaskProgress = { - var response = ResponseTaskProgress(taskID, 0, null) + var response = ResponseTaskProgress(taskID, 0.01f, null) if (StringUtils.isBlank(taskID)) return response val executor = taskIdCache.getIfPresent(taskID) if (null != executor) { @@ -503,19 +496,15 @@ class TaskExecutionServiceImpl if (ExecutionNodeStatus.isCompleted(task.getStatus)) { response = ResponseTaskProgress(taskID, 1.0f, null) } else { + val progress = executor.progress(taskID) + logger.info("The latest progress {} of the task id {} is:", progress, taskID) response = Utils.tryQuietly( - ResponseTaskProgress( - taskID, - executor.progress(taskID), - executor.getProgressInfo(taskID) - ) + ResponseTaskProgress(taskID, progress, executor.getProgressInfo(taskID)) ) } - } else { - response = ResponseTaskProgress(taskID, -1, null) } } else { - logger.error(s"Executor of taskId : $taskID is 
not cached.") + logger.info(s"Executor of taskId : $taskID is not cached.") } response } @@ -533,16 +522,20 @@ class TaskExecutionServiceImpl override def killTask(taskID: String): Unit = { val executor = taskIdCache.getIfPresent(taskID) if (null != executor) { - executor.killTask(taskID) + Utils.tryAndWarn(executor.killTask(taskID)) logger.info(s"TaskId : ${taskID} was killed by user.") } else { logger.error(s"Kill failed, got invalid executor : null for taskId : ${taskID}") } if (null != lastTask && lastTask.getTaskId.equalsIgnoreCase(taskID)) { - if (null != lastTaskFuture && !lastTaskFuture.isDone) { - Utils.tryAndWarn { - lastTaskFuture.cancel(true) - } + if (null != syncLastTaskThread) { + logger.info(s"try to interrupt thread:${taskID}") + Utils.tryAndWarn(syncLastTaskThread.interrupt()) + logger.info(s"thread isInterrupted:${taskID}") + } else { + logger.info(s"skip to force stop thread:${taskID}") + } + if (null != lastTaskDaemonFuture && !lastTaskDaemonFuture.isDone) { Utils.tryAndWarn { // Close the daemon also lastTaskDaemonFuture.cancel(true) @@ -563,7 +556,7 @@ class TaskExecutionServiceImpl } else { val msg = "Task null! requestTaskStatus: " + ComputationEngineUtils.GSON.toJson(requestTaskStatus) - logger.error(msg) + logger.info(msg) ResponseTaskStatus(requestTaskStatus.execId, ExecutionNodeStatus.Cancelled) } } @@ -603,14 +596,14 @@ class TaskExecutionServiceImpl logger.warn("Unknown event : " + BDPJettyServerHelper.gson.toJson(event)) } - override def onLogUpdate(logUpdateEvent: TaskLogUpdateEvent): Unit = { + override def onLogUpdate(logUpdateEvent: TaskLogUpdateEvent): Unit = Utils.tryAndWarn { if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) { if (null != logUpdateEvent && StringUtils.isNotBlank(logUpdateEvent.taskId)) { val task = getTaskByTaskId(logUpdateEvent.taskId) if (null != task) { sendToEntrance(task, ResponseTaskLog(logUpdateEvent.taskId, logUpdateEvent.log)) } else { - logger.error("Task cannot null! 
logupdateEvent: " + logUpdateEvent.taskId) + logger.info("Task cannot null! logupdateEvent: " + logUpdateEvent.taskId) } } else if (null != lastTask) { val executor = executorManager.getReportExecutor @@ -636,7 +629,6 @@ class TaskExecutionServiceImpl val task = getTaskByTaskId(taskStatusChangedEvent.taskId) if (null != task) { if (ExecutionNodeStatus.isCompleted(taskStatusChangedEvent.toStatus)) { - lastTask = task LogHelper.pushAllRemainLogs() } val toStatus = taskStatusChangedEvent.toStatus @@ -653,33 +645,40 @@ class TaskExecutionServiceImpl logger.info(s"task ${task.getTaskId} status $toStatus will not be send to entrance") } } else { - logger.error( + logger.info( "Task cannot null! taskStatusChangedEvent: " + ComputationEngineUtils.GSON .toJson(taskStatusChangedEvent) ) } } - override def onProgressUpdate(taskProgressUpdateEvent: TaskProgressUpdateEvent): Unit = { - if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) { - val task = getTaskByTaskId(taskProgressUpdateEvent.taskId) - if (null != task) { - sendToEntrance( - task, - ResponseTaskProgress( + override def onProgressUpdate(taskProgressUpdateEvent: TaskProgressUpdateEvent): Unit = + Utils.tryAndWarn { + if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) { + val task = getTaskByTaskId(taskProgressUpdateEvent.taskId) + if (null != task) { + val resourceResponse = buildResourceMap(task) + val extraInfoMap = buildExtraInfoMap(task) + + val resourceMap = if (null != resourceResponse) resourceResponse.resourceMap else null + + val respRunningInfo: ResponseTaskRunningInfo = ResponseTaskRunningInfo( taskProgressUpdateEvent.taskId, taskProgressUpdateEvent.progress, - taskProgressUpdateEvent.progressInfo + taskProgressUpdateEvent.progressInfo, + resourceMap, + extraInfoMap ) - ) - } else { - logger.error( - "Task cannot null! 
taskProgressUpdateEvent : " + ComputationEngineUtils.GSON - .toJson(taskProgressUpdateEvent) - ) + + sendToEntrance(task, respRunningInfo) + } else { + logger.info( + "Task cannot null! taskProgressUpdateEvent : " + ComputationEngineUtils.GSON + .toJson(taskProgressUpdateEvent) + ) + } } } - } override def onResultSetCreated(taskResultCreateEvent: TaskResultCreateEvent): Unit = { logger.info(s"start to deal result event ${taskResultCreateEvent.taskId}") @@ -694,7 +693,7 @@ class TaskExecutionServiceImpl ) ) } else { - logger.error(s"Task cannot null! taskResultCreateEvent: ${taskResultCreateEvent.taskId}") + logger.info(s"Task cannot null! taskResultCreateEvent: ${taskResultCreateEvent.taskId}") } logger.info(s"Finished to deal result event ${taskResultCreateEvent.taskId}") } @@ -705,7 +704,7 @@ class TaskExecutionServiceImpl if (null != executor) { executor.getTaskById(taskId) } else { - logger.error(s"Executor of taskId : $taskId is not cached.") + logger.info(s"Executor of taskId : $taskId is not cached.") null } } @@ -721,7 +720,7 @@ class TaskExecutionServiceImpl ) ) } else { - logger.error( + logger.info( "Task cannot null! 
taskResultSizeCreatedEvent: " + ComputationEngineUtils.GSON .toJson(taskResultSizeCreatedEvent) ) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala index 1f13380c01c..c7d78ef9eb3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala @@ -57,7 +57,7 @@ class ECTaskEntranceMonitor def unregister(taskID: String): Unit = { if (!wrapperMap.containsKey(taskID)) { - logger.error("attempted to unregister non-existing EngineConnTask!! task-id: " + taskID) + logger.warn("attempted to unregister non-existing EngineConnTask!! 
task-id: " + taskID) } wrapperMap.remove(taskID) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala index f2b894ef91a..3cc1fdfe82b 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala @@ -18,6 +18,8 @@ package org.apache.linkis.engineconn.computation.executor.upstream import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration +import org.apache.linkis.engineconn.acessible.executor.service.ExecutorHeartbeatServiceHolder import org.apache.linkis.engineconn.common.exception.EngineConnException import org.apache.linkis.engineconn.computation.executor.upstream.access.{ ConnectionInfoAccess, @@ -29,6 +31,8 @@ import org.apache.linkis.engineconn.computation.executor.upstream.handler.{ } import org.apache.linkis.engineconn.computation.executor.upstream.wrapper.ConnectionInfoWrapper import org.apache.linkis.engineconn.computation.executor.utlis.ComputationErrorCode +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.engineconn.executor.entity.ConcurrentExecutor import org.apache.commons.lang3.concurrent.BasicThreadFactory @@ -112,6 +116,23 @@ abstract class SingleThreadUpstreamConnectionMonitor( "requesting connection info: " + util.Arrays 
.toString(Collections.list(wrapperMap.keys).toArray()) ) + if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getValue) { + val executor = ExecutorManager.getInstance.getReportExecutor + executor match { + case concurrentExecutor: ConcurrentExecutor => + if (toBeRequested.size() > (concurrentExecutor.getConcurrentLimit + 20)) { + logger.warn( + s"Executor running task has exceed the limit ${toBeRequested.size()}, executor id ${concurrentExecutor.getId}" + ) + ExecutorHeartbeatServiceHolder + .getDefaultHeartbeatService() + .setSelfUnhealthy( + s"running task has exceed the limit: ${concurrentExecutor.getConcurrentLimit}" + ) + } + case _ => + } + } val infoAccessRequest = generateInfoAccessRequest(toBeRequested) val connectionInfoList = infoAccess.getUpstreamInfo(infoAccessRequest) logger.info( diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala index d06e8ac0779..15e70315e3f 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala @@ -39,10 +39,10 @@ class ECTaskKillHandler extends MonitorHandler with Logging { while (elements.hasNext) { val element = elements.next Utils.tryCatch { - doKill(element) logger.error( s"ERROR: entrance : ${element.getUpstreamConnection().getUpstreamServiceInstanceName()} lose connect, will kill job : ${element.getKey()}" ) + doKill(element) } { t => logger.error("Failed 
to kill job: " + element.getKey, t) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala index 0acc47260ef..44a80cff62f 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala @@ -66,8 +66,7 @@ class ECTaskEntranceMonitorService logger.info("registering new task: " + event.taskId) eCTaskEntranceMonitor.register(event.task, event.executor) } else if ( - fromStatus == ExecutionNodeStatus.Running && - (toStatus == ExecutionNodeStatus.Succeed || toStatus == ExecutionNodeStatus.Failed || toStatus == ExecutionNodeStatus.Cancelled || toStatus == ExecutionNodeStatus.Timeout) + !ExecutionNodeStatus.isCompleted(fromStatus) && ExecutionNodeStatus.isCompleted(toStatus) ) { logger.info("unRegistering task: " + event.taskId) eCTaskEntranceMonitor.unregister(event.task.getTaskId) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala index 2a03b405cb1..4e50fc0cf0a 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala @@ -27,4 +27,6 @@ object ComputationEngineConstant { def CS_HOOK_ORDER: Int = -1 + val TASK_EXECUTION_THREAD = "TaskExecution-Thread-" + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHookTest.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHookTest.scala new file mode 100644 index 00000000000..83d83b9c1ab --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHookTest.scala @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconn.computation.executor.hook + +import org.apache.linkis.engineconn.common.creation.{ + DefaultEngineCreationContext, + EngineCreationContext +} +import org.apache.linkis.engineconn.common.engineconn.DefaultEngineConn +import org.apache.linkis.manager.label.entity.engine.CodeLanguageLabel + +import org.junit.jupiter.api.Test +import org.mockito.Mockito.{mock, verify, when} + +// 单元测试案例 +class PythonModuleLoadEngineConnHookTest { + +// @Test +// def testAfterExecutionExecute(): Unit = { +// // 创建模拟对象 +// val mockEngineCreationContext = new DefaultEngineCreationContext +// val mockEngineConn = mock[DefaultEngineConn] +// val hook = new PythonSparkEngineHook +// +// // 设置模拟行为 +// var labels = new CodeLanguageLabel +// labels.setCodeType("spark") +// +// // 执行测试方法 +// hook.afterExecutionExecute(mockEngineCreationContext, mockEngineConn) +// +// } +// +// @Test +// def testAfterEngineServerStartFailed(): Unit = { +// // 创建模拟对象 +// val mockEngineCreationContext = mock[EngineCreationContext] +// val mockThrowable = mock[Throwable] +// val hook = new PythonSparkEngineHook +// +// // 设置模拟行为 +// var labels = new CodeLanguageLabel +// labels.setCodeType("spark") +// +// // 执行测试方法 +// hook.afterEngineServerStartFailed(mockEngineCreationContext, mockThrowable) +// +// } +// +// @Test +// def testBeforeCreateEngineConn(): Unit = { +// // 创建模拟对象 +// +// // 验证调用 +// +// } +// +// @Test +// def testBeforeExecutionExecute(): Unit = { +// // 创建模拟对象 +// val mockEngineCreationContext = mock[EngineCreationContext] +// val mockEngineConn = mock[DefaultEngineConn] +// val hook = new PythonSparkEngineHook +// +// // 执行测试方法 +// hook.beforeExecutionExecute(mockEngineCreationContext, mockEngineConn) +// +// } + +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala deleted file mode 100644 index 42e79c52ccf..00000000000 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineconn.computation.executor.upstream.access - -import org.apache.linkis.DataWorkCloudApplication -import org.apache.linkis.common.ServiceInstance -import org.apache.linkis.common.conf.{CommonVars, DWCArgumentsParser} -import org.apache.linkis.common.utils.Utils -import org.apache.linkis.engineconn.common.creation.DefaultEngineCreationContext -import org.apache.linkis.engineconn.core.util.EngineConnUtils -import org.apache.linkis.governance.common.conf.GovernanceCommonConf -import org.apache.linkis.governance.common.utils.EngineConnArgumentsParser -import org.apache.linkis.manager.engineplugin.common.launch.process.Environment -import org.apache.linkis.manager.label.builder.factory.{ - LabelBuilderFactory, - LabelBuilderFactoryContext -} -import org.apache.linkis.manager.label.entity.Label -import org.apache.linkis.server.conf.ServerConfiguration - -import org.apache.commons.lang3.StringUtils - -import java.util - -import org.slf4j.{Logger, LoggerFactory} - -object ECTaskEntranceInfoAccessHelper { - val logger: Logger = LoggerFactory.getLogger(ECTaskEntranceInfoAccessHelper.getClass) - - val engineCreationContext = new DefaultEngineCreationContext - val labelBuilderFactory: LabelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory - - def initApp(args: Array[String]): Unit = { - val arguments = EngineConnArgumentsParser.getEngineConnArgumentsParser.parseToObj(args) - val engineConf = arguments.getEngineConnConfMap - engineCreationContext.setUser(engineConf.getOrElse("user", Utils.getJvmUser)) - engineCreationContext.setTicketId(engineConf.getOrElse("ticketId", "")) - val host = CommonVars(Environment.ECM_HOST.toString, "127.0.0.1").getValue - val port = CommonVars(Environment.ECM_PORT.toString, "80").getValue - engineCreationContext.setEMInstance( - ServiceInstance(GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue, s"$host:$port") - ) - val labels = new util.ArrayList[Label[_]] - val labelArgs = 
engineConf.filter(_._1.startsWith(EngineConnArgumentsParser.LABEL_PREFIX)) - if (labelArgs.nonEmpty) { - labelArgs.foreach { case (key, value) => - labels.add( - labelBuilderFactory - .createLabel[Label[_]](key.replace(EngineConnArgumentsParser.LABEL_PREFIX, ""), value) - ) - } - engineCreationContext.setLabels(labels) - } - val jMap = new java.util.HashMap[String, String](engineConf.size) - engineConf.foreach(kv => jMap.put(kv._1, kv._2)) - engineCreationContext.setOptions(jMap) - engineCreationContext.setArgs(args) - // EngineConnObject.setEngineCreationContext(engineCreationContext) - logger.info( - "Finished to init engineCreationContext: " + EngineConnUtils.GSON - .toJson(engineCreationContext) - ) - - logger.info("Spring is enabled, now try to start SpringBoot.") - logger.info("<--------------------Start SpringBoot App-------------------->") - val parser = DWCArgumentsParser.parse(engineCreationContext.getArgs) - DWCArgumentsParser.setDWCOptionMap(parser.getDWCConfMap) - val existsExcludePackages = ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.getValue - if (!StringUtils.isEmpty(existsExcludePackages)) { - DataWorkCloudApplication.setProperty( - ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.key, - existsExcludePackages - ) - } - // 加载spring类 - DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(parser.getSpringConfMap)) - - logger.info("<--------------------SpringBoot App init succeed-------------------->") - } - -} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala index 1518f00a6ce..896b1bc7dae 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala @@ -37,7 +37,7 @@ object EngineConnConf { val ENGINE_CONN_ONCE_HOOKS = CommonVars( "linkis.engine.connector.once.hooks", - "org.apache.linkis.engineconn.computation.executor.hook.ComputationEngineConnHook" + "org.apache.linkis.engineconn.once.executor.hook.OnceEngineConnHook" ) val ENGINE_LAUNCH_CMD_PARAMS_USER_KEY = @@ -61,6 +61,8 @@ object EngineConnConf { val ENGINE_CONN_LOCAL_LOG_DIRS_KEY = CommonVars("wds.linkis.engine.logs.dir.key", "LOG_DIRS") + val ENGINE_CONN_LOCAL_TMP_DIR = CommonVars("wds.linkis.engine.tmp.dir", "TEMP_DIRS") + val ENGINE_CONN_CREATION_WAIT_TIME = CommonVars("wds.linkis.engine.connector.init.time", new TimeType("8m")) @@ -79,8 +81,15 @@ object EngineConnConf { val HIVE_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX = CommonVars("wds.linkis.hive.engine.yarn.app.id.parse.regex", "(application_\\d{13}_\\d+)") + val JOB_YARN_TASK_URL = CommonVars("linkis.job.task.yarn.url", ""); + + val JOB_YARN_CLUSTER_TASK_URL = CommonVars("linkis.job.task.yarn.cluster.url", ""); + + val ENGINE_CONF_REVENT_SWITCH = CommonVars("linkis.engine.conf.revent.switch", true) def getWorkHome: String = System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue) + def getEngineTmpDir: String = System.getenv(ENGINE_CONN_LOCAL_TMP_DIR.getValue) + def getLogDir: String = { val logDir = System.getenv(ENGINE_CONN_LOCAL_LOG_DIRS_KEY.getValue) if (StringUtils.isNotEmpty(logDir)) logDir else new File(getWorkHome, "logs").getPath diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala index 352a9039cb5..fec2756f9fe 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala @@ -24,4 +24,8 @@ object EngineConnConstant { val SPRING_CONF_MAP_NAME = "SpringConfMap" val MAX_EXECUTOR_ID_NAME = "MaxExecutorId" + + var hiveLogReg = "The url to track the job: http://.*?/proxy/(application_[0-9]+_[0-9]+)/" + + val YARN_LOG_URL = "Yarn application url:" } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/executor/ExecutorManager.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/executor/ExecutorManager.scala index f758b129e3b..61242beaae4 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/executor/ExecutorManager.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/executor/ExecutorManager.scala @@ -137,7 +137,7 @@ class LabelExecutorManagerImpl extends LabelExecutorManager with Logging { } protected def getLabelKey(labels: Array[Label[_]]): String = - labels.map(_.getStringValue).mkString("&") + labels.filter(null != _).map(_.getStringValue).mkString("&") protected def createExecutor( engineCreationContext: EngineCreationContext, @@ -171,7 +171,10 @@ class LabelExecutorManagerImpl extends LabelExecutorManager with Logging { MessageFormat.format(CANNOT_GET_LABEL_KEY.getErrorDesc, GSON.toJson(labels)) ) } - + if (!executors.isEmpty && factories.size <= 1) { + logger.info("For a single Executor EC, if an Executor exists, it will be returned directly") + return getReportExecutor.asInstanceOf[LabelExecutor] + } if 
(!executors.containsKey(labelKey)) executors synchronized { if (!executors.containsKey(labelKey)) { val executor = tryCreateExecutor(engineCreationContext, labels) @@ -184,6 +187,12 @@ class LabelExecutorManagerImpl extends LabelExecutorManager with Logging { override def generateExecutorId(): Int = idCreator.getAndIncrement() override def getExecutorByLabels(labels: Array[Label[_]]): LabelExecutor = { + + if (!executors.isEmpty && factories.size <= 1) { + logger.info("For a single Executor EC, if an Executor exists, it will be returned directly") + return getReportExecutor.asInstanceOf[LabelExecutor] + } + val labelKey = getLabelKey(labels) if (null == labelKey) return null if (!executors.containsKey(labelKey)) executors synchronized { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/hook/ShutdownHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/hook/ShutdownHook.scala index 86ab8a1f684..524f44c33af 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/hook/ShutdownHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/hook/ShutdownHook.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.core.hook import org.apache.linkis.common.utils.Logging import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.locks.ReentrantLock class ShutdownHook extends Logging { @@ -35,6 +36,10 @@ class ShutdownHook extends Logging { // Guarded by "lock" private var stopped: Boolean = false + private val tryStopTimes = new AtomicInteger(0) + + private val maxTimes = 10; + def notifyError(e: Throwable): Unit = { lock.lock() try { @@ -49,12 +54,17 @@ class ShutdownHook extends Logging { def 
notifyStop(): Unit = { lock.lock() + val num = tryStopTimes.incrementAndGet() try { setExitCode(0) stopped = true condition.signalAll() } finally { lock.unlock() + if (num >= maxTimes) { + logger.error(s"try to stop with times:${num}, now do system exit!!!") + System.exit(0) + } } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala index bbace9e66fb..d2247a6d2e3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala @@ -17,10 +17,14 @@ package org.apache.linkis.engineconn.core.util -import com.google.gson.Gson +import com.google.gson.{GsonBuilder, ToNumberPolicy} object EngineConnUtils { - val GSON = new Gson() + val GSON = new GsonBuilder() + .setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ") + .serializeNulls + .setObjectToNumberStrategy(ToNumberPolicy.LAZILY_PARSED_NUMBER) + .create } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala index 180798a772d..14a0701d9df 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala @@ -127,6 +127,7 @@ object EngineConnServer 
extends Logging { this.engineCreationContext.setEMInstance( ServiceInstance(GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue, s"$host:$port") ) + val labels = new ArrayBuffer[Label[_]] val labelArgs = engineConf.filter(_._1.startsWith(EngineConnArgumentsParser.LABEL_PREFIX)) if (labelArgs.nonEmpty) { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java index 3da912d483e..05976bb2c71 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java @@ -18,8 +18,14 @@ package org.apache.linkis.engineconn.acessible.executor.log; import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration; +import org.apache.linkis.engineconn.common.conf.EngineConnConf; +import org.apache.linkis.engineconn.common.conf.EngineConnConstant; +import org.apache.linkis.engineconn.common.creation.EngineCreationContext; +import org.apache.linkis.engineconn.core.EngineConnObject; import org.apache.linkis.engineconn.executor.listener.EngineConnSyncListenerBus; import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext; +import org.apache.linkis.manager.label.constant.LabelKeyConstant; +import org.apache.linkis.manager.label.entity.Label; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.core.Filter; @@ -33,6 +39,8 @@ import org.apache.logging.log4j.core.layout.PatternLayout; import 
java.io.Serializable; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -93,6 +101,7 @@ public void append(LogEvent event) { } } if (!flag) { + // logStr = matchLog(logStr); logCache.cacheLog(logStr); } } else { @@ -115,4 +124,28 @@ public static SendAppender createAppender( } return new SendAppender(name, filter, layout, ignoreExceptions); } + + /** + * * + * + *

Match the hive log, if it matches the yarn log, print the log and replace it + */ + public String matchLog(String logLine) { + Matcher hiveMatcher = Pattern.compile(EngineConnConstant.hiveLogReg()).matcher(logLine); + if (hiveMatcher.find()) { + String yarnUrl = EngineConnConf.JOB_YARN_TASK_URL().getValue(); + EngineCreationContext engineContext = EngineConnObject.getEngineCreationContext(); + if (null != engineContext) { + for (Label label : engineContext.getLabels()) { + if (label.getLabelKey().equals(LabelKeyConstant.YARN_CLUSTER_KEY)) { + yarnUrl = EngineConnConf.JOB_YARN_CLUSTER_TASK_URL().getValue(); + } + } + } + logLine = + hiveMatcher.replaceAll( + EngineConnConstant.YARN_LOG_URL() + yarnUrl + hiveMatcher.group(1)); + } + return logLine; + } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala index 0eb211f7319..0cebf5ed15d 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala @@ -37,13 +37,16 @@ object AccessibleExecutorConfiguration { val ENGINECONN_LOG_SEND_SIZE = CommonVars[Int]("wds.linkis.engineconn.log.send.cache.size", 300) val ENGINECONN_MAX_FREE_TIME = - CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("30m")) + CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("5m")) val 
ENGINECONN_LOCK_CHECK_INTERVAL = CommonVars("wds.linkis.engineconn.lock.free.interval", new TimeType("3m")) - val ENGINECONN_SUPPORT_PARALLELISM: Boolean = - CommonVars("wds.linkis.engineconn.support.parallelism", false).getValue + val ENGINECONN_ENABLED_LOCK_IDLE_TIME_OUT = + CommonVars("linkis.engineconn.enabled.lock.timeout.release", true) + + val ENGINECONN_SUPPORT_PARALLELISM = + CommonVars("wds.linkis.engineconn.support.parallelism", false) val ENGINECONN_HEARTBEAT_TIME = CommonVars("wds.linkis.engineconn.heartbeat.time", new TimeType("2m")) @@ -55,4 +58,22 @@ object AccessibleExecutorConfiguration { val ENABLE_MAINTAIN_CREATORS = CommonVars("wds.linkis.engineconn.maintain.cretors", "IDE") + val REPORTING_DELAY_MS = CommonVars( + "linkis.engineconn.heartbeat.report.delay", + 20, + "Heartbeat status reporting delay, default 20ms, Negative numbers do not take effect" + ).getValue + + val REPORTING_IGNORE_MS = CommonVars( + "linkis.engineconn.heartbeat.report.ignore", + 3, + "Heartbeat status report repeated ignore, default 3ms,Negative numbers do not take effect" + ).getValue + + val ENGINECONN_AUTO_EXIT = + CommonVars("linkis.engineconn.support.auto.exit", false).getValue + + val ENGINECONN_AUTO_EXIT_DAYS = + CommonVars("linkis.engineconn.auto.exit.days", 7).getValue + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorSpringConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorSpringConfiguration.scala index 53cdd44b056..93cb41f344f 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorSpringConfiguration.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorSpringConfiguration.scala @@ -43,9 +43,13 @@ class AccessibleExecutorSpringConfiguration extends Logging { def createLockManager(): LockService = { val lockService = - if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM) { + if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getHotValue()) { + logger.info("Engine supports parallelism.") new EngineConnConcurrentLockService - } else new EngineConnTimedLockService + } else { + logger.info("Engine doesn't support parallelism.") + new EngineConnTimedLockService + } asyncListenerBusContext.addListener(lockService) lockService } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala index 1b5713e5689..e99f5f21c1b 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.acessible.executor.execution import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor +import 
org.apache.linkis.engineconn.acessible.executor.service.ExecutorHeartbeatServiceHolder import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.common.engineconn.EngineConn import org.apache.linkis.engineconn.common.execution.EngineConnExecution @@ -40,6 +41,7 @@ import org.apache.linkis.manager.common.protocol.resource.ResourceUsedProtocol import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.rpc.Sender +import java.util.Random import java.util.concurrent.TimeUnit class AccessibleEngineConnExecution extends EngineConnExecution with Logging { @@ -73,6 +75,9 @@ class AccessibleEngineConnExecution extends EngineConnExecution with Logging { reportUsedResource(executor, engineCreationContext) reportLabel(executor) executorStatusChecker + if (AccessibleExecutorConfiguration.ENGINECONN_AUTO_EXIT) { + ecAutoExit() + } afterReportToLinkisManager(executor, engineCreationContext, engineConn) } @@ -140,6 +145,39 @@ class AccessibleEngineConnExecution extends EngineConnExecution with Logging { ) } + /** + * EC auto exit only support concurrent executor + */ + private def ecAutoExit(): Unit = { + logger.info(s"ec auto exit start ${System.currentTimeMillis()}") + Utils.defaultScheduler.schedule( + new Runnable { + override def run(): Unit = Utils.tryAndWarn { + ExecutorManager.getInstance.getReportExecutor match { + case executor: ConcurrentExecutor => + val rand = new Random + val minute = rand.nextInt(5) + 1 + Thread.sleep(minute * 60000L) + if (executor.hasTaskRunning()) { + ExecutorHeartbeatServiceHolder + .getDefaultHeartbeatService() + .setSelfUnhealthy(s"EC running time exceed max time") + } else { + logger.warn( + s"Executor has no task running ${executor.getId}, will be to shutdown ec" + ) + executor.tryShutdown() + } + case _ => + logger.warn(s"Executor is not a ConcurrentExecutor, do noting") + } + } + }, + AccessibleExecutorConfiguration.ENGINECONN_AUTO_EXIT_DAYS, + TimeUnit.DAYS + ) + } + 
def requestManagerReleaseExecutor(msg: String, nodeStatus: NodeStatus): Unit = { val engineReleaseRequest = new EngineConnReleaseRequest( Sender.getThisServiceInstance, diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/hook/OperationHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/hook/OperationHook.scala new file mode 100644 index 00000000000..12e42c66a55 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/hook/OperationHook.scala @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconn.acessible.executor.hook + +import org.apache.linkis.manager.common.protocol.engine.{ + EngineOperateRequest, + EngineOperateResponse +} + +import scala.collection.mutable.ArrayBuffer + +trait OperationHook { + def getName(): String + + def doPreOperation( + engineOperateRequest: EngineOperateRequest, + engineOperateResponse: EngineOperateResponse + ): Unit + + def doPostOperation( + engineOperateRequest: EngineOperateRequest, + engineOperateResponse: EngineOperateResponse + ): Unit + +} + +object OperationHook { + private var operationHooks: ArrayBuffer[OperationHook] = new ArrayBuffer[OperationHook]() + + def registerOperationHook(operationHook: OperationHook): Unit = { + operationHooks.append(operationHook) + } + + def getOperationHooks(): Array[OperationHook] = operationHooks.toArray +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala index 4365d5881d6..6b96b6d4ed6 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala @@ -20,7 +20,7 @@ package org.apache.linkis.engineconn.acessible.executor.info import org.apache.linkis.common.utils.Logging import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor import org.apache.linkis.engineconn.core.executor.ExecutorManager -import 
org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} import org.apache.linkis.manager.common.entity.metrics.NodeHealthyInfo import org.springframework.stereotype.Component @@ -29,20 +29,45 @@ trait NodeHealthyInfoManager { def getNodeHealthyInfo(): NodeHealthyInfo + def setNodeHealthy(healthy: NodeHealthy): Unit + + def getNodeHealthy(): NodeHealthy + + def setByManager(setByManager: Boolean): Unit + } @Component class DefaultNodeHealthyInfoManager extends NodeHealthyInfoManager with Logging { + private var healthy: NodeHealthy = NodeHealthy.Healthy + + private var setByManager: Boolean = false + override def getNodeHealthyInfo(): NodeHealthyInfo = { val nodeHealthyInfo = new NodeHealthyInfo nodeHealthyInfo.setMsg("") - nodeHealthyInfo.setNodeHealthy( + + /** 如果是manager主动设置的,则以manager设置的为准 */ + val newHealthy: NodeHealthy = if (this.setByManager) { + this.healthy + } else { NodeStatus.isEngineNodeHealthy( ExecutorManager.getInstance.getReportExecutor.asInstanceOf[AccessibleExecutor].getStatus ) - ) + } + logger.info("current node healthy status is {}", newHealthy) + nodeHealthyInfo.setNodeHealthy(newHealthy) nodeHealthyInfo } + override def setNodeHealthy(healthy: NodeHealthy): Unit = { + this.healthy = healthy + } + + override def setByManager(setByManager: Boolean): Unit = { + this.setByManager = setByManager + } + + override def getNodeHealthy(): NodeHealthy = this.healthy } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala index 26a9203795f..bb395450917 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala @@ -27,7 +27,9 @@ import org.apache.linkis.engineconn.acessible.executor.listener.event.{ ExecutorStatusChangedEvent, ExecutorUnLockEvent } +import org.apache.linkis.engineconn.core.EngineConnObject import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.engineconn.executor.entity.SensibleExecutor import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.manager.common.entity.enumeration.NodeStatus @@ -42,12 +44,14 @@ class EngineConnTimedLock(private var timeout: Long) val releaseScheduler = new ScheduledThreadPoolExecutor(1) var releaseTask: ScheduledFuture[_] = null var lastLockTime: Long = 0 - var lockedBy: AccessibleExecutor = null + + val idleTimeLockOut = AccessibleExecutorConfiguration.ENGINECONN_LOCK_CHECK_INTERVAL + .getValue(EngineConnObject.getEngineCreationContext.getOptions) + .toLong override def acquire(executor: AccessibleExecutor): Unit = { lock.acquire() lastLockTime = System.currentTimeMillis() - lockedBy = executor scheduleTimeout } @@ -57,8 +61,6 @@ class EngineConnTimedLock(private var timeout: Long) logger.debug("try to lock for succeed is " + succeed.toString) if (succeed) { lastLockTime = System.currentTimeMillis() - lockedBy = executor - logger.debug("try to lock for add time out task ! 
Locked by thread : " + lockedBy.getId) scheduleTimeout } succeed @@ -67,18 +69,13 @@ class EngineConnTimedLock(private var timeout: Long) // Unlock callback is not called in release method, because release method is called actively override def release(): Unit = { logger.debug( - "try to release for lock," + lockedBy + ",current thread " + Thread.currentThread().getName + s"try to release for lock: ${lock.toString}, current thread " + Thread.currentThread().getName ) - if (lockedBy != null) { - // && lockedBy == Thread.currentThread() Inconsistent thread(线程不一致) - logger.debug("try to release for lockedBy and thread ") - if (releaseTask != null) { - releaseTask.cancel(true) - releaseTask = null - } - logger.debug("try to release for lock release success") - lockedBy = null + if (releaseTask != null) { + releaseTask.cancel(true) + releaseTask = null } + logger.debug("try to release for lock release success") unlockCallback(lock.toString) resetLock() } @@ -96,7 +93,6 @@ class EngineConnTimedLock(private var timeout: Long) releaseScheduler.purge() } lock.release() - lockedBy = null } resetLock() } @@ -108,19 +104,26 @@ class EngineConnTimedLock(private var timeout: Long) new Runnable { override def run(): Unit = { synchronized { - if (isAcquired() && NodeStatus.Idle == lockedBy.getStatus && isExpired()) { - // unlockCallback depends on lockedBy, so lockedBy cannot be set null before unlockCallback - logger.info(s"Lock : [${lock.toString} was released due to timeout.") - release() - } else if (isAcquired() && NodeStatus.Busy == lockedBy.getStatus) { - lastLockTime = System.currentTimeMillis() - logger.info("Update lastLockTime because executor is busy.") + ExecutorManager.getInstance.getReportExecutor match { + case reportExecutor: AccessibleExecutor => + if ( + isAcquired() && NodeStatus.Idle == reportExecutor.getStatus && isExpired() + ) { + // unlockCallback depends on lockedBy, so lockedBy cannot be set null before unlockCallback + logger.info( + s"Lock : 
[${lock.toString} was released due to timeout. idleTimeLockOut $idleTimeLockOut" + ) + release() + } else if (isAcquired() && NodeStatus.Busy == reportExecutor.getStatus) { + lastLockTime = System.currentTimeMillis() + logger.info("Update lastLockTime because executor is busy.") + } } } } }, 3000, - AccessibleExecutorConfiguration.ENGINECONN_LOCK_CHECK_INTERVAL.getValue.toLong, + idleTimeLockOut, TimeUnit.MILLISECONDS ) logger.info("Add scheduled timeout task.") @@ -135,7 +138,11 @@ class EngineConnTimedLock(private var timeout: Long) override def isExpired(): Boolean = { if (lastLockTime == 0) return false if (timeout <= 0) return false - System.currentTimeMillis() - lastLockTime > timeout + if (AccessibleExecutorConfiguration.ENGINECONN_ENABLED_LOCK_IDLE_TIME_OUT.getValue) { + System.currentTimeMillis() - lastLockTime > idleTimeLockOut + } else { + System.currentTimeMillis() - lastLockTime > timeout + } } override def numOfPending(): Int = { @@ -143,14 +150,12 @@ class EngineConnTimedLock(private var timeout: Long) } override def renew(): Boolean = { - if (lockedBy != null) { - if (isAcquired && releaseTask != null) { - if (releaseTask.cancel(false)) { - releaseScheduler.purge() - scheduleTimeout - lastLockTime = System.currentTimeMillis() - return true - } + if (isAcquired && releaseTask != null) { + if (releaseTask.cancel(false)) { + releaseScheduler.purge() + scheduleTimeout + lastLockTime = System.currentTimeMillis() + return true } } false @@ -169,9 +174,18 @@ class EngineConnTimedLock(private var timeout: Long) } private def unlockCallback(lockStr: String): Unit = { - /* if (null != lockedBy) { - lockedBy.transition(NodeStatus.Unlock) - } */ + val nodeStatus = ExecutorManager.getInstance.getReportExecutor match { + case sensibleExecutor: SensibleExecutor => + sensibleExecutor.getStatus + case _ => NodeStatus.Idle + } + if (NodeStatus.isCompleted(nodeStatus)) { + logger.info( + "The node({}) is already in the completed state, and the unlocking is invalid", + 
nodeStatus.toString + ) + return + } val executors = ExecutorManager.getInstance.getExecutors.filter(executor => null != executor && !executor.isClosed ) @@ -185,7 +199,7 @@ class EngineConnTimedLock(private var timeout: Long) ExecutorListenerBusContext .getExecutorListenerBusContext() .getEngineConnAsyncListenerBus - .post(ExecutorUnLockEvent(null, lockStr.toString)) + .post(ExecutorUnLockEvent(null, lockStr)) } override def onExecutorCreated(executorCreateEvent: ExecutorCreateEvent): Unit = {} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala index 7602a3b2887..a7169697aea 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala @@ -44,6 +44,10 @@ object LogHelper extends Logging { def setLogListener(logListener: LogListener): Unit = this.logListener = logListener + def cacheLog(log: String): Unit = { + logCache.cacheLog(log) + } + def pushAllRemainLogs(): Unit = { // logger.info(s"start to push all remain logs") Thread.sleep(30) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala index ad762892ef4..c7635615e0c 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala @@ -17,9 +17,11 @@ package org.apache.linkis.engineconn.acessible.executor.operator.impl +import org.apache.linkis.engineconn.acessible.executor.service.OperateService import org.apache.linkis.engineconn.common.exception.EngineConnException import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.YarnExecutor +import org.apache.linkis.governance.common.constant.ec.ECConstants._ import org.apache.linkis.manager.common.operator.Operator class EngineConnApplicationInfoOperator extends Operator { @@ -30,10 +32,10 @@ class EngineConnApplicationInfoOperator extends Operator { ExecutorManager.getInstance.getReportExecutor match { case yarnExecutor: YarnExecutor => Map( - "applicationId" -> yarnExecutor.getApplicationId, - "applicationUrl" -> yarnExecutor.getApplicationURL, - "queue" -> yarnExecutor.getQueue, - "yarnMode" -> yarnExecutor.getYarnMode + YARN_APPID_NAME_KEY -> yarnExecutor.getApplicationId, + YARN_APP_URL_KEY -> yarnExecutor.getApplicationURL, + QUEUE -> yarnExecutor.getQueue, + YARN_MODE_KEY -> yarnExecutor.getYarnMode ) case _ => throw EngineConnException( diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala index d07d16ce273..97a9cab5da8 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala @@ -19,6 +19,7 @@ package org.apache.linkis.engineconn.acessible.executor.service import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor import org.apache.linkis.engineconn.acessible.executor.listener.event.{ ExecutorCompletedEvent, @@ -57,6 +58,12 @@ class DefaultAccessibleService extends AccessibleService with Logging { private var shutDownHooked: Boolean = false + private var lastStatusChanged: Long = System.currentTimeMillis() + + private var lastStatus: NodeStatus = null + + private var lastThreadName: String = null + @Receiver override def dealEngineStopRequest( engineSuicideRequest: EngineSuicideRequest, @@ -67,7 +74,7 @@ class DefaultAccessibleService extends AccessibleService with Logging { DataWorkCloudApplication.getServiceInstance.equals(engineSuicideRequest.getServiceInstance) ) { stopEngine() - logger.info(s"engine will suiside now.") + logger.info(s"engine was asked to suiside by 
${engineSuicideRequest.getUser} now.") ShutdownHook.getShutdownHook.notifyStop() } else { if (null != engineSuicideRequest.getServiceInstance) { @@ -106,7 +113,6 @@ class DefaultAccessibleService extends AccessibleService with Logging { logger.info("Reported status shuttingDown to manager.") Utils.tryQuietly(Thread.sleep(2000)) shutDownHooked = true - ShutdownHook.getShutdownHook.notifyStop() } override def stopExecutor: Unit = { @@ -167,7 +173,39 @@ class DefaultAccessibleService extends AccessibleService with Logging { override def onExecutorStatusChanged( executorStatusChangedEvent: ExecutorStatusChangedEvent ): Unit = { - reportHeartBeatMsg(executorStatusChangedEvent.executor) + val sinceLastTime = System.currentTimeMillis() - lastStatusChanged + val reportDelay = AccessibleExecutorConfiguration.REPORTING_DELAY_MS + if ( + reportDelay > 0 && executorStatusChangedEvent.toStatus != lastStatus && reportDelay > sinceLastTime + ) { + logger.info( + "In order to ensure that the previous state is consumed first, sleep here {} ms", + reportDelay * 2 + ) + + Thread.sleep(reportDelay * 2) + } + val ignoreTime = AccessibleExecutorConfiguration.REPORTING_IGNORE_MS + val currentThreadName = Thread.currentThread().getName + if ( + ignoreTime > 0 && executorStatusChangedEvent.toStatus == lastStatus && ignoreTime > sinceLastTime && currentThreadName + .equals(lastThreadName) + ) { + logger.info( + "If the status is the same and the time is short and the thread is the same, no status report is performed {}", + executorStatusChangedEvent + ) + } else if ( + NodeStatus.Busy == lastStatus && executorStatusChangedEvent.toStatus == NodeStatus.Idle + ) { + logger.info("The state transition from Busy to Idle is not reported") + } else { + reportHeartBeatMsg(executorStatusChangedEvent.executor) + } + logger.info("Finished to report status {}", executorStatusChangedEvent) + lastStatusChanged = System.currentTimeMillis() + lastStatus = executorStatusChangedEvent.toStatus + lastThreadName 
= currentThreadName } private def reportHeartBeatMsg(executor: Executor): Unit = { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala index 067e0d2cbb8..ff8e6666d16 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala @@ -31,8 +31,13 @@ import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.{Executor, ResourceExecutor, SensibleExecutor} import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.engineconn.executor.service.ManagerService -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.protocol.node.{NodeHeartbeatMsg, NodeHeartbeatRequest} +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} +import org.apache.linkis.manager.common.entity.metrics.NodeHealthyInfo +import org.apache.linkis.manager.common.protocol.node.{ + NodeHealthyRequest, + NodeHeartbeatMsg, + NodeHeartbeatRequest +} import org.apache.linkis.rpc.Sender import org.apache.linkis.rpc.message.annotation.Receiver @@ -61,6 +66,8 @@ class DefaultExecutorHeartbeatService private val asyncListenerBusContext = 
ExecutorListenerBusContext.getExecutorListenerBusContext.getEngineConnAsyncListenerBus + private val healthyLock = new Object() + @PostConstruct private def init(): Unit = { asyncListenerBusContext.addListener(this) @@ -78,6 +85,7 @@ class DefaultExecutorHeartbeatService heartbeatTime, TimeUnit.MILLISECONDS ) + ExecutorHeartbeatServiceHolder.registerHeartBeatService(this) } /** @@ -94,6 +102,16 @@ class DefaultExecutorHeartbeatService nodeHeartbeatRequest: NodeHeartbeatRequest ): NodeHeartbeatMsg = generateHeartBeatMsg(null) + @Receiver + def dealNodeHealthyRequest(nodeHealthyRequest: NodeHealthyRequest): Unit = + healthyLock synchronized { + val toHealthy = nodeHealthyRequest.getNodeHealthy + val healthyInfo: NodeHealthyInfo = nodeHealthyInfoManager.getNodeHealthyInfo() + logger.info(s"engine nodeHealthy from ${healthyInfo.getNodeHealthy} to ${toHealthy}") + nodeHealthyInfoManager.setByManager(true) + nodeHealthyInfoManager.setNodeHealthy(toHealthy) + } + override def onNodeHealthyUpdate(nodeHealthyUpdateEvent: NodeHealthyUpdateEvent): Unit = { logger.warn(s"node healthy update, tiger heartbeatReport") // val executor = ExecutorManager.getInstance.getReportExecutor @@ -138,4 +156,15 @@ class DefaultExecutorHeartbeatService nodeHeartbeatMsg } + override def setSelfUnhealthy(reason: String): Unit = healthyLock synchronized { + logger.info(s"Set self to unhealthy to automatically exit, reason: $reason") + if (EngineConnObject.isReady) { + val nodeHealthyInfo = nodeHealthyInfoManager.getNodeHealthyInfo() + if (nodeHealthyInfo.getNodeHealthy != NodeHealthy.UnHealthy) { + nodeHealthyInfoManager.setNodeHealthy(NodeHealthy.UnHealthy) + nodeHealthyInfoManager.setByManager(true) + } + } + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala index 1ab5a16265f..fe3d731b7d2 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala @@ -75,6 +75,9 @@ class DefaultManagerService extends ManagerService with Logging { override def heartbeatReport(nodeHeartbeatMsg: NodeHeartbeatMsg): Unit = { getManagerSender.send(nodeHeartbeatMsg) + if (nodeHeartbeatMsg != null && nodeHeartbeatMsg.getHealthyInfo != null) { + logger.info("report engine healthy status: {}", nodeHeartbeatMsg.getHealthyInfo) + } logger.info( "success to send engine heartbeat report to {},status: {},msg: {}", Array( diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala index df34626c204..c0ef50636dd 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala @@ -18,6 +18,7 @@ package 
org.apache.linkis.engineconn.acessible.executor.service import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.hook.OperationHook import org.apache.linkis.manager.common.operator.OperatorFactory import org.apache.linkis.manager.common.protocol.engine.{ EngineOperateRequest, @@ -38,19 +39,50 @@ class DefaultOperateService extends OperateService with Logging { override def executeOperation( engineOperateRequest: EngineOperateRequest ): EngineOperateResponse = { + var response: EngineOperateResponse = null + val parameters = engineOperateRequest.parameters.asScala.toMap val operator = Utils.tryCatch(OperatorFactory().getOperatorRequest(parameters)) { t => logger.error(s"Get operator failed, parameters is ${engineOperateRequest.parameters}.", t) - return EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) + response = EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) + doPostHook(engineOperateRequest, response) + return response } logger.info( s"Try to execute operator ${operator.getClass.getSimpleName} with parameters ${engineOperateRequest.parameters}." 
) val result = Utils.tryCatch(operator(parameters)) { t => logger.error(s"Execute ${operator.getClass.getSimpleName} failed.", t) - return EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) + response = EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) + doPostHook(engineOperateRequest, response) + return response + } + logger.info(s"End to execute operator ${operator.getClass.getSimpleName}.") + response = EngineOperateResponse(result) + doPostHook(engineOperateRequest, response) + response + } + + private def doPreHook( + engineOperateRequest: EngineOperateRequest, + engineOperateResponse: EngineOperateResponse + ): Unit = { + Utils.tryAndWarn { + OperationHook + .getOperationHooks() + .foreach(hook => hook.doPreOperation(engineOperateRequest, engineOperateResponse)) + } + } + + private def doPostHook( + engineOperateRequest: EngineOperateRequest, + engineOperateResponse: EngineOperateResponse + ): Unit = { + Utils.tryAndWarn { + OperationHook + .getOperationHooks() + .foreach(hook => hook.doPostOperation(engineOperateRequest, engineOperateResponse)) } - EngineOperateResponse(result) } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala index 452c6305b0f..026234e9380 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala @@ -51,7 +51,7 @@ class EngineConnTimedLockService extends LockService with Logging { private var lockType: EngineLockType = EngineLockType.Timed private def isSupportParallelism: Boolean = - AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM + AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getHotValue() /** * @param lock @@ -161,9 +161,7 @@ class EngineConnTimedLockService extends LockService with Logging { .toString ) if (isLockExist(lock)) { - logger.info( - s"try to unlock lockEntity : lockString=$lockString,lockedBy=${engineConnLock.lockedBy.getId}" - ) + logger.info(s"try to unlock lockEntity : lockString=$lockString") engineConnLock.release() this.lockString = null true diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala index bfecf732524..77344921e36 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala @@ -33,4 +33,17 @@ trait ExecutorHeartbeatService { def dealNodeHeartbeatRequest(nodeHeartbeatRequest: NodeHeartbeatRequest): NodeHeartbeatMsg + def setSelfUnhealthy(reason: String): Unit + +} + +object 
ExecutorHeartbeatServiceHolder { + + private var executorHeartbeatService: ExecutorHeartbeatService = _ + + def registerHeartBeatService(executorHeartbeatService: ExecutorHeartbeatService): Unit = + this.executorHeartbeatService = executorHeartbeatService + + def getDefaultHeartbeatService(): ExecutorHeartbeatService = executorHeartbeatService + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala index fc48a51f1f0..9b4a3ebc289 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala @@ -17,4 +17,18 @@ package org.apache.linkis.engineconn.acessible.executor.utils -object AccessibleExecutorUtils {} +import org.apache.linkis.DataWorkCloudApplication.getApplicationContext +import org.apache.linkis.engineconn.acessible.executor.info.DefaultNodeHealthyInfoManager +import org.apache.linkis.manager.common.entity.enumeration.NodeHealthy + +object AccessibleExecutorUtils { + + val manager: DefaultNodeHealthyInfoManager = + getApplicationContext.getBean(classOf[DefaultNodeHealthyInfoManager]) + + /** 当前引擎是否不健康 不健康返回 true */ + def currentEngineIsUnHealthy(): Boolean = { + manager != null && manager.getNodeHealthy() == NodeHealthy.UnHealthy + } + +} diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala index d7ad2c79797..dfe1137084a 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala @@ -20,17 +20,22 @@ package org.apache.linkis.engineconn.callback.hook import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.conf.DWCArgumentsParser import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor import org.apache.linkis.engineconn.callback.service.{ EngineConnAfterStartCallback, - EngineConnPidCallback + EngineConnIdentifierCallback } import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.common.engineconn.EngineConn import org.apache.linkis.engineconn.common.hook.EngineConnHook +import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.core.hook.ShutdownHook import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback +import org.apache.linkis.manager.label.constant.LabelValueConstant +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.rpc.Sender 
import org.apache.linkis.server.conf.ServerConfiguration @@ -58,16 +63,31 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { val newMap = map.++(parser.getSpringConfMap) newMap.put("spring.mvc.servlet.path", ServerConfiguration.BDP_SERVER_RESTFUL_URI.getValue) DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(newMap.toMap)) + val context = EngineConnObject.getEngineCreationContext - val engineConnPidCallBack = new EngineConnPidCallback(engineCreationContext.getEMInstance) - Utils.tryAndError(engineConnPidCallBack.callback()) + val label = LabelUtil.getEngingeConnRuntimeModeLabel(context.getLabels()) + if (null != label && label.getModeValue.equals(LabelValueConstant.YARN_CLUSTER_VALUE)) { + logger.info("cluster mode call back will be invoke in beforeExecutionExecute") + } else { + val engineConnPidCallBack = new EngineConnIdentifierCallback() + Utils.tryAndError(engineConnPidCallBack.callback()) + } logger.info("<--------------------SpringBoot App init succeed-------------------->") } override def beforeExecutionExecute( engineCreationContext: EngineCreationContext, engineConn: EngineConn - ): Unit = {} + ): Unit = { + val context = EngineConnObject.getEngineCreationContext + + val label = LabelUtil.getEngingeConnRuntimeModeLabel(context.getLabels()) + if (null != label && label.getModeValue.equals(LabelValueConstant.YARN_CLUSTER_VALUE)) { + logger.info("cluster mode call back be invoke") + val engineConnPidCallBack = new EngineConnIdentifierCallback() + Utils.tryAndError(engineConnPidCallBack.callback()) + } + } override def afterExecutionExecute( engineCreationContext: EngineCreationContext, @@ -78,9 +98,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { engineCreationContext: EngineCreationContext, throwable: Throwable ): Unit = { - val engineConnAfterStartCallback = new EngineConnAfterStartCallback( - engineCreationContext.getEMInstance - ) + val engineConnAfterStartCallback = new 
EngineConnAfterStartCallback val prefixMsg = Sender.getThisServiceInstance + s": log dir: ${EngineConnConf.getLogDir}," Utils.tryAndError( engineConnAfterStartCallback.callback( @@ -88,7 +106,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { Sender.getThisServiceInstance, engineCreationContext.getTicketId, NodeStatus.Failed, - prefixMsg + ExceptionUtils.getRootCauseMessage(throwable) + prefixMsg + ExceptionUtils.getStackTrace(throwable) ) ) ) @@ -99,15 +117,29 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { protected def getNodeStatusOfStartSuccess( engineCreationContext: EngineCreationContext, engineConn: EngineConn - ): NodeStatus = NodeStatus.Success + ): NodeStatus = { + ExecutorManager.getInstance.getReportExecutor match { + case executor: AccessibleExecutor => + if ( + executor.getStatus == NodeStatus.ShuttingDown || executor.getStatus == NodeStatus.Failed + ) { + logger.info( + "The status of EngineConn is {}, and the actual status will be reported", + executor.getStatus + ) + executor.getStatus + } else { + NodeStatus.Unlock + } + case _ => NodeStatus.Unlock + } + } override def afterEngineServerStartSuccess( engineCreationContext: EngineCreationContext, engineConn: EngineConn ): Unit = { - val engineConnAfterStartCallback = new EngineConnAfterStartCallback( - engineCreationContext.getEMInstance - ) + val engineConnAfterStartCallback = new EngineConnAfterStartCallback Utils.tryAndError( engineConnAfterStartCallback.callback( EngineConnStatusCallback( diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala index fe6275ce675..d61e711f5d0 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala @@ -17,10 +17,7 @@ package org.apache.linkis.engineconn.callback.service -import org.apache.linkis.common.ServiceInstance - -class EngineConnAfterStartCallback(emInstance: ServiceInstance) - extends AbstractEngineConnStartUpCallback(emInstance) { +class EngineConnAfterStartCallback extends AbstractEngineConnStartUpCallback { override def callback(): Unit = {} } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala index 8a028d0a907..d1eb83d3913 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala @@ -17,8 +17,8 @@ package org.apache.linkis.engineconn.callback.service -import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.utils.Logging +import org.apache.linkis.governance.common.conf.GovernanceCommonConf import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback import org.apache.linkis.protocol.message.RequestProtocol @@ -26,31 
+26,25 @@ import org.apache.linkis.rpc.Sender trait EngineConnCallback { - protected def getEMSender: Sender - def callback(): Unit } -abstract class AbstractEngineConnStartUpCallback(emInstance: ServiceInstance) - extends EngineConnCallback - with Logging { - - override protected def getEMSender: Sender = { - Sender.getSender(emInstance) - } +abstract class AbstractEngineConnStartUpCallback() extends EngineConnCallback with Logging { def callback(protocol: RequestProtocol): Unit = { protocol match { case protocol: EngineConnStatusCallback => if (protocol.status.equals(NodeStatus.Failed)) { - logger.error(s"protocol will send to em: ${protocol}") + logger.error(s"EngineConn Start Failed protocol will send to LM: ${protocol}") } else { - logger.info(s"protocol will send to em: ${protocol}") + logger.info(s"protocol will send to lm: ${protocol}") } case _ => } - getEMSender.send(protocol) + Sender + .getSender(GovernanceCommonConf.ENGINE_APPLICATION_MANAGER_SPRING_NAME.getValue) + .send(protocol) } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnIdentifierCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnIdentifierCallback.scala new file mode 100644 index 00000000000..8b9e3ad36e3 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnIdentifierCallback.scala @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconn.callback.service + +import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.engineconn.executor.entity.YarnExecutor +import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid +import org.apache.linkis.manager.label.constant.LabelValueConstant +import org.apache.linkis.manager.label.utils.LabelUtil +import org.apache.linkis.rpc.Sender + +import java.lang.management.ManagementFactory + +class EngineConnIdentifierCallback extends AbstractEngineConnStartUpCallback { + + override def callback(): Unit = { + + var identifier = ManagementFactory.getRuntimeMXBean.getName.split("@")(0) + val instance = Sender.getThisServiceInstance + val context = EngineConnObject.getEngineCreationContext + + val label = LabelUtil.getEngingeConnRuntimeModeLabel(context.getLabels()) + if (null != label && label.getModeValue.equals(LabelValueConstant.YARN_CLUSTER_VALUE)) { + identifier = ExecutorManager.getInstance.getReportExecutor match { + case cluster: YarnExecutor => cluster.getApplicationId + } + } + callback(ResponseEngineConnPid(instance, identifier, context.getTicketId)) + } + +} diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala deleted file mode 100644 index 23a3f90a2b9..00000000000 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineconn.callback.service - -import org.apache.linkis.common.ServiceInstance -import org.apache.linkis.engineconn.core.EngineConnObject -import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid -import org.apache.linkis.rpc.Sender - -import java.lang.management.ManagementFactory - -class EngineConnPidCallback(emInstance: ServiceInstance) - extends AbstractEngineConnStartUpCallback(emInstance) { - - override def callback(): Unit = { - val pid = ManagementFactory.getRuntimeMXBean.getName.split("@")(0) - val instance = Sender.getThisServiceInstance - val context = EngineConnObject.getEngineCreationContext - callback(ResponseEngineConnPid(instance, pid, context.getTicketId)) - } - -} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala index a7675287e0b..7f70e21e21d 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala @@ -20,11 +20,11 @@ package org.apache.linkis.engineconn.executor import org.apache.linkis.common.io.{FsPath, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetWriter} import org.apache.linkis.common.utils.Utils -import org.apache.linkis.governance.common.conf.GovernanceCommonConf +import org.apache.linkis.engineconn.executor.conf.EngineConnExecutorConfiguration +import org.apache.linkis.governance.common.utils.GovernanceUtils import 
org.apache.linkis.manager.label.entity.Label import org.apache.commons.lang3.StringUtils -import org.apache.commons.lang3.time.DateFormatUtils import java.util.concurrent.atomic.AtomicInteger @@ -60,9 +60,12 @@ trait ExecutorExecutionContext { def setLabels(labels: Array[Label[_]]): Unit = this.labels = labels protected def getDefaultStorePath: String = { - val path = GovernanceCommonConf.RESULT_SET_STORE_PATH.getValue - val pathPrefix = (if (path.endsWith("/")) path else path + "/") + Utils.getJvmUser + "/" + - DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd") + "/" + val path = if (EngineConnExecutorConfiguration.LINKIS_RES_DEFAULT_ENABLED) { + GovernanceUtils.getResultParentPath(GovernanceUtils.LINKIS_DEFAULT_RES_CREATOR) + } else { + "hdfs:///apps-data/" + Utils.getJvmUser + } + val pathPrefix = (if (path.endsWith("/")) path else path + "/") + Utils.getJvmUser + "/" getJobId.map(pathPrefix + _ + "/" + System.nanoTime).getOrElse(pathPrefix + System.nanoTime) } @@ -81,11 +84,11 @@ trait ExecutorExecutionContext { protected def getDefaultResultSetByType: String def createDefaultResultSetWriter(): ResultSetWriter[_ <: MetaData, _ <: Record] = { - createResultSetWriter(getResultSetByType(getDefaultResultSetByType)) // todo check + createResultSetWriter(getResultSetByType(getDefaultResultSetByType)) } def createDefaultResultSetWriter(alias: String): ResultSetWriter[_ <: MetaData, _ <: Record] = - createResultSetWriter(getResultSetByType(getDefaultResultSetByType), alias) // todo check + createResultSetWriter(getResultSetByType(getDefaultResultSetByType), alias) def createResultSetWriter(resultSetType: String): ResultSetWriter[_ <: MetaData, _ <: Record] = createResultSetWriter(getResultSetByType(resultSetType), null) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala index 813022ceb8c..f847b9c34fc 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala @@ -66,4 +66,7 @@ object EngineConnExecutorConfiguration { val DEFAULT_EXECUTOR_NAME = CommonVars("wds.linkis.engineconn.executor.default.name", "ComputationExecutor") + val LINKIS_RES_DEFAULT_ENABLED = + CommonVars("wds.linkis.engineconn.res.default.enabled", true).getValue + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/entity/KubernetesExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/entity/KubernetesExecutor.scala new file mode 100644 index 00000000000..2c220e1314a --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/entity/KubernetesExecutor.scala @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconn.executor.entity + +trait KubernetesExecutor extends Executor { + + def getKubernetesClusterID: String + + def getApplicationURL: String + + def getNamespace: String + +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java index 1685f4b6524..24c8b904cd9 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java @@ -23,7 +23,7 @@ public enum EngineconnCoreErrorCodeSummary implements LinkisErrorCode { FAILED_CREATE_ELR(10001, "Failed to createEngineConnLaunchRequest(创建 EngineConnLaunchRequest失败)"), EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION( 10001, - "The engine plug-in material is abnormal, please check whether the material is uploaded successfully(引擎插件物料异常,请检查物料是否上传成功)"), + "The engine plugin material is abnormal, please check whether the material is uploaded successfully(引擎插件物料异常,请检查物料是否上传成功)"), ETL_REQUESTED(10001, "EngineTypeLabel are requested(需要参数 EngineTypeLabel)"), 
CANNOT_INSTANCE_ECE(20000, "Cannot instance EngineConnExecution(无法实例化 EngineConnExecution)"), @@ -32,7 +32,7 @@ public enum EngineconnCoreErrorCodeSummary implements LinkisErrorCode { UCL_NOT_EXISTS(20000, "UserCreatorLabel does not exist(UserCreatorLabel 不存在)"), CANNOT_HOME_PATH_EC(20001, "Cannot find the home path of engineConn(找不到 engineConn 的 home 路径)"), CANNOT_HOME_PATH_DIST( - 20001, "Cannot find the home path of engineconn dist(找不到 engineconn dist 的 home 路径)"), + 20001, "Cannot find the home path:{0} of engineconn dist(找不到 engineconn dist 的 home 路径)"), DIST_IS_EMPTY( 20001, "The dist of EngineConn is empty,engineConnType is:{0}(EngineConn 的 dist 为空,engineConnType为:{0})"), diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EngineConnPluginConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EngineConnPluginConf.scala index 704204577ee..c36d2a3b1de 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EngineConnPluginConf.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EngineConnPluginConf.scala @@ -35,4 +35,7 @@ object EngineConnPluginConf { "org.apache.linkis.engineconn.launch.EngineConnServer" ) + val PYTHON_VERSION_KEY: String = "python.version" + val SPARK_PYTHON_VERSION_KEY: String = "spark.python.version" + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala index 290c6211e1b..680c1c149b7 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala @@ -36,18 +36,15 @@ object EnvConfiguration { CommonVars[String]("HADOOP_CONF_DIR", "/appcom/config/hadoop-config").getValue ) - val ENGINE_CONN_JARS = CommonVars("wds.linkis.engineConn.jars", "", "engineConn额外的Jars") - val ENGINE_CONN_CLASSPATH_FILES = - CommonVars("wds.linkis.engineConn.files", "", "engineConn额外的配置文件") + CommonVars("linkis.engineConn.classpath.files", "", "engineConn额外的配置文件") + + val MAX_METASPACE_SIZE = CommonVars("linkis.engineconn.metaspace.size.max", "512m") - val metaspaceSize = if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { - "-XX:MaxMetaspaceSize=256m -XX:MetaspaceSize=128m" - } else { - "-XX:MaxPermSize=256m -XX:PermSize=128m" - } + lazy val metaspaceSize = + s"-XX:MaxMetaspaceSize=${MAX_METASPACE_SIZE.getValue} -XX:MetaspaceSize=128m" - val ENGINE_CONN_DEFAULT_JAVA_OPTS = CommonVars[String]( + lazy val ENGINE_CONN_DEFAULT_JAVA_OPTS = CommonVars[String]( "wds.linkis.engineConn.javaOpts.default", s"-XX:+UseG1GC ${metaspaceSize} " + s"-Xloggc:%s -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Dwds.linkis.server.conf=linkis-engineconn.properties -Dwds.linkis.gateway.url=${Configuration.getGateWayURL()}" diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala index 5271ec37e90..e46126523a7 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala @@ -53,7 +53,7 @@ abstract class JavaProcessEngineConnLaunchBuilder this.engineConnResourceGenerator = engineConnResourceGenerator protected def getGcLogDir(engineConnBuildRequest: EngineConnBuildRequest): String = - variable(LOG_DIRS) + "/gc.log" + variable(LOG_DIRS) + "/gc" protected def getLogDir(engineConnBuildRequest: EngineConnBuildRequest): String = s" -Dlogging.file=${EnvConfiguration.LOG4J2_XML_FILE.getValue} " + @@ -117,49 +117,27 @@ abstract class JavaProcessEngineConnLaunchBuilder addPathToClassPath(environment, variable(HIVE_CONF_DIR)) } // first, add engineconn conf dirs. - addPathToClassPath(environment, Seq(variable(PWD), ENGINE_CONN_CONF_DIR_NAME)) + addPathToClassPath(environment, buildPath(Seq(variable(PWD), ENGINE_CONN_CONF_DIR_NAME))) // then, add LINKIS_CONF_DIR conf dirs. - addPathToClassPath(environment, Seq(EnvConfiguration.LINKIS_CONF_DIR.getValue)) + addPathToClassPath(environment, buildPath(Seq(EnvConfiguration.LINKIS_CONF_DIR.getValue))) // then, add engineconn libs. - addPathToClassPath(environment, Seq(variable(PWD), ENGINE_CONN_LIB_DIR_NAME + "/*")) + addPathToClassPath(environment, buildPath(Seq(variable(PWD), ENGINE_CONN_LIB_DIR_NAME + "/*"))) // then, add public modules. 
if (!enablePublicModule) { - addPathToClassPath(environment, Seq(LINKIS_PUBLIC_MODULE_PATH.getValue + "/*")) + addPathToClassPath(environment, buildPath(Seq(LINKIS_PUBLIC_MODULE_PATH.getValue + "/*"))) } // finally, add the suitable properties key to classpath - engineConnBuildRequest.engineConnCreationDesc.properties.asScala.foreach { case (key, value) => - if ( - key - .startsWith("engineconn.classpath") || key.startsWith("wds.linkis.engineconn.classpath") - ) { - addPathToClassPath(environment, Seq(variable(PWD), new File(value).getName)) - } - } - getExtraClassPathFile.foreach { file: String => - addPathToClassPath(environment, Seq(variable(PWD), new File(file).getName)) + val taskClassPathFiles = EnvConfiguration.ENGINE_CONN_CLASSPATH_FILES.getValue( + engineConnBuildRequest.engineConnCreationDesc.properties + ) + if (StringUtils.isNotBlank(taskClassPathFiles)) { + taskClassPathFiles + .split(",") + .filter(StringUtils.isNotBlank(_)) + .foreach(file => addPathToClassPath(environment, buildPath(Seq(file)))) } - engineConnBuildRequest match { - case richer: RicherEngineConnBuildRequest => - def addFiles(files: String): Unit = if (StringUtils.isNotBlank(files)) { - files - .split(",") - .foreach(file => - addPathToClassPath(environment, Seq(variable(PWD), new File(file).getName)) - ) - } - - val configs: util.Map[String, String] = - richer.getStartupConfigs.asScala - .filter(_._2.isInstanceOf[String]) - .map { case (k, v: String) => - k -> v - } - .asJava - val jars: String = EnvConfiguration.ENGINE_CONN_JARS.getValue(configs) - addFiles(jars) - val files: String = EnvConfiguration.ENGINE_CONN_CLASSPATH_FILES.getValue(configs) - addFiles(files) - case _ => + getExtraClassPathFile.filter(StringUtils.isNotBlank(_)).foreach { file: String => + addPathToClassPath(environment, buildPath(Seq(new File(file).getName))) } environment } @@ -198,7 +176,7 @@ abstract class JavaProcessEngineConnLaunchBuilder ) ++: engineConnResource.getOtherBmlResources.toList }.asJava - 
private implicit def buildPath(paths: Seq[String]): String = + private def buildPath(paths: Seq[String]): String = Paths.get(paths.head, paths.tail: _*).toFile.getPath } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/EngineResourceRequest.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/EngineResourceRequest.scala index 3b3005fee6e..8bcc79b4109 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/EngineResourceRequest.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/EngineResourceRequest.scala @@ -22,7 +22,7 @@ import org.apache.linkis.protocol.message.RequestProtocol import java.util -trait EngineResourceRequest extends RequestProtocol { +trait EngineResourceRequest { val user: String val labels: util.List[Label[_]] val properties: util.Map[String, String] diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala index aada8caedcc..02565a394b0 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala @@ -34,6 +34,9 @@ class UserNodeResource 
extends NodeResource { private var leftResource: Resource = _ private var createTime: Date = _ private var updateTime: Date = _ + private var maxApps: Int = _ + private var numPendingApps: Int = _ + private var numActiveApps: Int = _ def getUser: String = user @@ -87,4 +90,23 @@ class UserNodeResource extends NodeResource { override def getId: Integer = id override def setId(id: Integer): Unit = this.id = id + + override def getMaxApps: Integer = maxApps + + override def setMaxApps(maxApps: Integer): Unit = { + this.maxApps = maxApps + } + + override def getNumPendingApps: Integer = numPendingApps + + override def setNumPendingApps(numPendingApps: Integer): Unit = { + this.numPendingApps = numPendingApps + } + + override def getNumActiveApps: Integer = numActiveApps + + override def setNumActiveApps(numActiveApps: Integer): Unit = { + this.numActiveApps = numActiveApps + } + } diff --git a/linkis-computation-governance/linkis-entrance/pom.xml b/linkis-computation-governance/linkis-entrance/pom.xml index b9ebec930e8..a2f65bb99c2 100644 --- a/linkis-computation-governance/linkis-entrance/pom.xml +++ b/linkis-computation-governance/linkis-entrance/pom.xml @@ -60,18 +60,6 @@ provided - - org.apache.linkis - linkis-cs-client - ${project.version} - - - - org.apache.linkis - linkis-error-code-client - ${project.version} - - org.apache.linkis linkis-computation-governance-common @@ -92,13 +80,13 @@ org.apache.linkis - linkis-bml-client + linkis-pes-client ${project.version} org.apache.linkis - linkis-instance-label-client + linkis-pes-rpc-client ${project.version} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java index 0bf27a68b3a..a07f99f3069 100644 --- 
a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java @@ -28,30 +28,17 @@ import org.apache.linkis.entrance.interceptor.EntranceInterceptor; import org.apache.linkis.entrance.interceptor.OnceJobInterceptor; import org.apache.linkis.entrance.interceptor.impl.*; -import org.apache.linkis.entrance.log.CacheLogManager; -import org.apache.linkis.entrance.log.ErrorCodeListener; -import org.apache.linkis.entrance.log.ErrorCodeManager; -import org.apache.linkis.entrance.log.FlexibleErrorCodeManager$; -import org.apache.linkis.entrance.log.LogManager; -import org.apache.linkis.entrance.log.PersistenceErrorCodeListener; +import org.apache.linkis.entrance.log.*; import org.apache.linkis.entrance.parser.CommonEntranceParser; -import org.apache.linkis.entrance.persistence.EntranceResultSetEngine; -import org.apache.linkis.entrance.persistence.PersistenceEngine; -import org.apache.linkis.entrance.persistence.PersistenceManager; -import org.apache.linkis.entrance.persistence.QueryPersistenceEngine; -import org.apache.linkis.entrance.persistence.QueryPersistenceManager; -import org.apache.linkis.entrance.persistence.ResultSetEngine; +import org.apache.linkis.entrance.persistence.*; import org.apache.linkis.entrance.scheduler.EntranceGroupFactory; +import org.apache.linkis.entrance.scheduler.EntranceParallelConsumerManager; import org.apache.linkis.entrance.scheduler.EntranceSchedulerContext; -import org.apache.linkis.orchestrator.ecm.EngineConnManagerBuilder; -import org.apache.linkis.orchestrator.ecm.EngineConnManagerBuilder$; -import org.apache.linkis.orchestrator.ecm.entity.Policy; import org.apache.linkis.scheduler.Scheduler; import org.apache.linkis.scheduler.SchedulerContext; import org.apache.linkis.scheduler.executer.ExecutorManager; import 
org.apache.linkis.scheduler.queue.ConsumerManager; import org.apache.linkis.scheduler.queue.GroupFactory; -import org.apache.linkis.scheduler.queue.parallelqueue.ParallelConsumerManager; import org.apache.linkis.scheduler.queue.parallelqueue.ParallelScheduler; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; @@ -137,20 +124,19 @@ public EntranceInterceptor[] entranceInterceptors() { new OnceJobInterceptor(), new CSEntranceInterceptor(), new ShellDangerousGrammerInterceptor(), - // new PythonCodeCheckInterceptor(), - // new DBInfoCompleteInterceptor(), new CompatibleInterceptor(), new SparkCodeCheckInterceptor(), new SQLCodeCheckInterceptor(), new LabelCheckInterceptor(), new ParserVarLabelInterceptor(), new VarSubstitutionInterceptor(), + new AISQLTransformInterceptor(), + new SensitiveCheckInterceptor(), new LogPathCreateInterceptor(), - new StorePathEntranceInterceptor(), new ScalaCodeInterceptor(), + new PythonCodeCheckInterceptor(), new SQLLimitEntranceInterceptor(), new CommentInterceptor(), - new SetTenantLabelInterceptor(), new UserCreatorIPCheckInterceptor() }; } @@ -190,7 +176,7 @@ public GroupFactory groupFactory() { @Bean @ConditionalOnMissingBean public ConsumerManager consumerManager() { - return new ParallelConsumerManager( + return new EntranceParallelConsumerManager( ENTRANCE_SCHEDULER_MAX_PARALLELISM_USERS().getValue(), "EntranceJobScheduler"); } @@ -204,9 +190,7 @@ public SchedulerContext schedulerContext( @Bean @ConditionalOnMissingBean public ExecutorManager executorManager(GroupFactory groupFactory) { - EngineConnManagerBuilder engineConnManagerBuilder = EngineConnManagerBuilder$.MODULE$.builder(); - engineConnManagerBuilder.setPolicy(Policy.Process); - return new EntranceExecutorManagerImpl(groupFactory, engineConnManagerBuilder.build()); + return new EntranceExecutorManagerImpl(groupFactory); } @Bean diff --git 
a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java index cb37279c113..bee17b8ed4e 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java @@ -26,4 +26,6 @@ private ServiceNameConsts() {} public static final String ENTRANCE_SERVER = "entranceServer"; public static final String ENTRANCE_INTERCEPTOR = "entranceInterceptors"; + + public static final String ENTRANCE_FAILOVER_SERVER = "entranceFailoverServer"; } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java index 2f045a17602..147beda8a85 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java @@ -49,7 +49,10 @@ public enum EntranceErrorCodeSummary implements LinkisErrorCode { JOBRESP_PROTOCOL_NULL( 20011, "Request jobHistory failed, because:jobRespProtocol is null (请求jobHistory失败,因为jobRespProtocol为null)"), - READ_TASKS_FAILED(20011, "Read all tasks failed, because:{0}(获取所有任务失败)"), + READ_TASKS_FAILED(20011, "Read all tasks failed, because:{0}(获取所有任`务失败)"), + EXEC_FAILED_TO_RETRY( + 20503, + "Resources are tight, and the task is executing intelligent retry (资源紧张,当前任务正在进行智能重试)"), SENDER_RPC_FAILED(20020, "Sender rpc failed"), @@ -62,6 +65,11 @@ public enum EntranceErrorCodeSummary implements LinkisErrorCode 
{ INVALID_RESULTSETS(20053, "Invalid resultsets, cannot use cache(结果集无效,无法使用 cache)"), SUBMITTING_QUERY_FAILED(30009, "Submitting the query failed(提交查询失败)!"), + + SUBMIT_CODE_ISEMPTY( + 30010, + "Submitting the execution code, after code preprocessing, the real execution code is empty, please check the executed code(提交的执行代码,经过预处理后为空,请检查执行的代码是否为空或则只有注解)!"), + QUERY_STATUS_FAILED(50081, "Query from jobHistory status failed(从 jobHistory 状态查询失败)"), GET_QUERY_RESPONSE(50081, "Get query response incorrectly(获取查询响应结果不正确)"), QUERY_TASKID_ERROR(50081, "Query task of taskId:{0} error(查询任务id:{}的任务出错)"), @@ -71,7 +79,13 @@ public enum EntranceErrorCodeSummary implements LinkisErrorCode { SHELL_BLACKLISTED_CODE(50081, "Shell code contains blacklisted code(shell中包含黑名单代码)"), JOB_HISTORY_FAILED_ID(50081, ""), - LOGPATH_NOT_NULL(20301, "The logPath cannot be empty(日志路径不能为空)"); + LOGPATH_NOT_NULL(20301, "The logPath cannot be empty(日志路径不能为空)"), + + DOCTORIS_ERROR(20302, "Doctoris data retrieval is abnormal"), + + FAILOVER_RUNNING_TO_CANCELLED( + 30001, + "Job {0} failover, status changed from Running to Cancelled (任务故障转移,状态从Running变更为Cancelled)"); /** (errorCode)错误码 */ private final int errorCode; diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java index d9b33820fba..1eb911ecec3 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java @@ -27,8 +27,10 @@ import org.apache.linkis.entrance.log.WebSocketCacheLogReader; import org.apache.linkis.entrance.log.WebSocketLogWriter; import org.apache.linkis.entrance.persistence.PersistenceManager; +import org.apache.linkis.entrance.utils.CommonLogPathUtils; 
import org.apache.linkis.governance.common.conf.GovernanceCommonConf; import org.apache.linkis.governance.common.constant.job.JobRequestConstants; +import org.apache.linkis.governance.common.entity.job.JobRequest; import org.apache.linkis.governance.common.protocol.task.RequestTask$; import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.orchestrator.plans.ast.QueryParams$; @@ -125,11 +127,12 @@ public ExecuteRequest jobToExecuteRequest() throws EntranceErrorException { // add resultSet path root Map starupMapTmp = new HashMap<>(); Map starupMapOri = TaskUtils.getStartupMap(getParams()); + JobRequest jobRequest = getJobRequest(); if (starupMapOri.isEmpty()) { TaskUtils.addStartupMap(getParams(), starupMapOri); } if (!starupMapOri.containsKey(JobRequestConstants.JOB_REQUEST_LIST())) { - starupMapOri.put(JobRequestConstants.JOB_ID(), String.valueOf(getJobRequest().getId())); + starupMapOri.put(JobRequestConstants.JOB_ID(), String.valueOf(jobRequest.getId())); } for (Map.Entry entry : starupMapOri.entrySet()) { if (null != entry.getKey() && null != entry.getValue()) { @@ -142,7 +145,7 @@ public ExecuteRequest jobToExecuteRequest() throws EntranceErrorException { runtimeMapOri = TaskUtils.getRuntimeMap(getParams()); } if (!runtimeMapOri.containsKey(JobRequestConstants.JOB_ID())) { - runtimeMapOri.put(JobRequestConstants.JOB_ID(), String.valueOf(getJobRequest().getId())); + runtimeMapOri.put(JobRequestConstants.JOB_ID(), String.valueOf(jobRequest.getId())); } Map runtimeMapTmp = new HashMap<>(); for (Map.Entry entry : runtimeMapOri.entrySet()) { @@ -150,13 +153,21 @@ public ExecuteRequest jobToExecuteRequest() throws EntranceErrorException { runtimeMapTmp.put(entry.getKey(), entry.getValue().toString()); } } + String resultSetPathRoot = GovernanceCommonConf.RESULT_SET_STORE_PATH().getValue(runtimeMapTmp); + + if (!runtimeMapTmp.containsKey(GovernanceCommonConf.RESULT_SET_STORE_PATH().key())) { + String resultParentPath = 
CommonLogPathUtils.getResultParentPath(jobRequest); + CommonLogPathUtils.buildCommonPath(resultParentPath, true); + resultSetPathRoot = CommonLogPathUtils.getResultPath(jobRequest); + } + Map jobMap = new HashMap(); jobMap.put(RequestTask$.MODULE$.RESULT_SET_STORE_PATH(), resultSetPathRoot); runtimeMapOri.put(QueryParams$.MODULE$.JOB_KEY(), jobMap); - + jobRequest.setResultLocation(resultSetPathRoot); EntranceExecuteRequest executeRequest = new EntranceExecuteRequest(this); - List> labels = new ArrayList>(getJobRequest().getLabels()); + List> labels = new ArrayList>(jobRequest.getLabels()); executeRequest.setLabels(labels); return executeRequest; } @@ -224,26 +235,32 @@ public JobInfo getJobInfo() { : "not submit to ec"; StringBuffer sb = new StringBuffer(); - sb.append("Task creation time(任务创建时间): ") + sb.append("Task time point information(任务时间节点信息):\n") + .append("[Task creation time(任务创建时间)] :") .append(createTime) - .append(", Task scheduling time(任务调度时间): ") + .append("\n") + .append("[Task scheduling time(任务调度时间)]:") .append(scheduleTime) - .append(", Task start time(任务开始时间): ") + .append("\n") + .append("[Task start time(任务开始时间)] :") .append(startTime) - .append(", Mission end time(任务结束时间): ") + .append("\n") + .append("[Task end time(任务结束时间)] :") .append(endTime) .append("\n") .append(LogUtils.generateInfo("")) - .append("Task submit to Orchestrator time:") + .append("[Task submit to Orchestrator time]:") .append(jobToOrchestrator) - .append(", Task request EngineConn time:") + .append("\n") + .append("[Task request EngineConn time] :") .append(jobRequestEC) - .append(", Task submit to EngineConn time:") + .append("\n") + .append("[Task submit to EngineConn time] :") .append(jobSubmitToEC) .append("\n") .append( LogUtils.generateInfo( - "Your mission(您的任务) " + "Your task jobId(您的任务) " + this.getJobRequest().getId() + " The total time spent is(总耗时时间为): " + runTime)); @@ -269,4 +286,13 @@ public void close() throws IOException { logger.warn("Close logWriter 
and logReader failed. {}", e.getMessage(), e); } } + + @Override + public void clear() { + super.clear(); + this.setParams(null); + JobRequest jobRequest = this.getJobRequest(); + jobRequest.setExecutionCode(null); + jobRequest.setMetrics(null); + } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java index dd80bc8e846..f02ed2bc813 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/AbstractEntranceParser.java @@ -19,6 +19,7 @@ import org.apache.linkis.entrance.EntranceContext; import org.apache.linkis.entrance.EntranceParser; +import org.apache.linkis.entrance.conf.EntranceConfiguration; import org.apache.linkis.entrance.exception.EntranceErrorCode; import org.apache.linkis.entrance.exception.EntranceIllegalParamException; import org.apache.linkis.entrance.execute.EntranceJob; @@ -90,6 +91,16 @@ public JobRequest parseToJobRequest(Job job) throws EntranceIllegalParamExceptio jobRequest.setProgress("" + job.getProgress()); jobRequest.setStatus(job.getState().toString()); jobRequest.setUpdatedTime(new Date()); + + if (job.isCompleted() + && !job.isSucceed() + && EntranceConfiguration.TASK_RETRY_ENABLED() + && Integer.valueOf(20503).equals(jobRequest.getErrorCode()) + && job.getErrorResponse() != null + && StringUtils.isNotEmpty(job.getErrorResponse().message())) { + jobRequest.setErrorDesc(job.getErrorResponse().message()); + } + if (job.isCompleted() && !job.isSucceed() && job.getErrorResponse() != null diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java 
b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java index 86af74d5c88..d5aaa1a71d5 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java @@ -17,9 +17,10 @@ package org.apache.linkis.entrance.parser; +import org.apache.linkis.common.conf.Configuration; import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.entrance.conf.EntranceConfiguration$; import org.apache.linkis.entrance.utils.CommonLogPathUtils; +import org.apache.linkis.governance.common.conf.GovernanceCommonConf; import org.apache.linkis.governance.common.entity.job.JobRequest; import org.apache.linkis.manager.label.utils.LabelUtil; import org.apache.linkis.storage.utils.StorageUtils; @@ -28,67 +29,47 @@ import java.text.SimpleDateFormat; import java.util.Date; -import java.util.HashMap; import java.util.Map; public final class ParserUtils { - private static final Map types = new HashMap<>(); - - static { - types.put("py", "python"); - types.put("python", "python"); - types.put("sql", "sql"); - types.put("pyspark", "python"); - types.put("scala", "scala"); - types.put("rspark", "r"); - types.put("r", "r"); - types.put("java", "java"); - types.put("hql", "hql"); - types.put("sparksql", "sql"); - } - public static void generateLogPath(JobRequest jobRequest, Map params) { String logPath = null; String logPathPrefix = null; String logMid = "log"; if (StringUtils.isEmpty(logPathPrefix)) { - logPathPrefix = EntranceConfiguration$.MODULE$.DEFAULT_LOGPATH_PREFIX().getValue(); + logPathPrefix = GovernanceCommonConf.DEFAULT_LOGPATH_PREFIX(); } /*Determine whether logPathPrefix is terminated with /, if it is, delete */ /*判断是否logPathPrefix是否是以 / 结尾, 如果是,就删除*/ - if (logPathPrefix.endsWith("/")) { + if (logPathPrefix.endsWith("/")) { // NOSONAR logPathPrefix = 
logPathPrefix.substring(0, logPathPrefix.length() - 1); } Date date = new Date(System.currentTimeMillis()); SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); + SimpleDateFormat hourFormat = new SimpleDateFormat("HH"); // 新增:小时格式(24小时制) String dateString = dateFormat.format(date); + String hourString = hourFormat.format(date); // 新增:当前小时(如 "08", "14") String creator = LabelUtil.getUserCreator(jobRequest.getLabels())._2; String umUser = jobRequest.getExecuteUser(); FsPath lopPrefixPath = new FsPath(logPathPrefix); if (StorageUtils.HDFS().equals(lopPrefixPath.getFsType())) { - String commonLogPath = logPathPrefix + "/" + "log" + "/" + dateString + "/" + creator; + String commonLogPath = logPathPrefix + "/" + "log" + "/" + dateString + "/"; + if (Configuration.HDFS_HOUR_DIR_SWITCH()) { + commonLogPath = commonLogPath + hourString + "/" + creator; + } else { + commonLogPath = commonLogPath + creator; + } logPath = commonLogPath + "/" + umUser + "/" + jobRequest.getId() + ".log"; - CommonLogPathUtils.buildCommonPath(commonLogPath); + CommonLogPathUtils.buildCommonPath(commonLogPath, false); } else { - logPath = - logPathPrefix - + "/" - + umUser - + "/" - + "log" - + "/" - + creator - + "/" - + dateString - + "/" - + jobRequest.getId() - + ".log"; + logPath = logPathPrefix + "/" + umUser + "/log/" + creator + "/"; + if (Configuration.HDFS_HOUR_DIR_SWITCH()) { + logPath = logPath + dateString + "/" + hourString + "/" + jobRequest.getId() + ".log"; + } else { + logPath = logPath + hourString + "/" + jobRequest.getId() + ".log"; + } } jobRequest.setLogPath(logPath); } - - public static String getCorrespondingType(String runType) { - return types.get(runType.toLowerCase()); - } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceEngine.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceEngine.java index 
2fdf4b9b4a5..7fdaf06eb27 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceEngine.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceEngine.java @@ -17,10 +17,10 @@ package org.apache.linkis.entrance.persistence; +import org.apache.linkis.common.conf.Configuration$; import org.apache.linkis.common.exception.ErrorException; import org.apache.linkis.common.utils.JsonUtils; import org.apache.linkis.entrance.conf.EntranceConfiguration; -import org.apache.linkis.entrance.conf.EntranceConfiguration$; import org.apache.linkis.entrance.exception.EntranceIllegalParamException; import org.apache.linkis.entrance.exception.EntranceRPCException; import org.apache.linkis.entrance.exception.QueryFailedException; @@ -65,8 +65,7 @@ public QueryPersistenceEngine() { Get the corresponding sender through datawork-linkis-publicservice(通过datawork-linkis-publicservice 拿到对应的sender) */ sender = - Sender.getSender( - EntranceConfiguration$.MODULE$.JOBHISTORY_SPRING_APPLICATION_NAME().getValue()); + Sender.getSender(Configuration$.MODULE$.JOBHISTORY_SPRING_APPLICATION_NAME().getValue()); } private JobRespProtocol sendToJobHistoryAndRetry(RequestProtocol jobReq, String msg) diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java index 39964fdad1d..f256e112cde 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java @@ -18,22 +18,39 @@ package org.apache.linkis.entrance.persistence; import 
org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.utils.LinkisUtils; import org.apache.linkis.entrance.EntranceContext; import org.apache.linkis.entrance.cli.heartbeat.CliHeartbeatMonitor; +import org.apache.linkis.entrance.conf.EntranceConfiguration; import org.apache.linkis.entrance.cs.CSEntranceHelper; import org.apache.linkis.entrance.execute.EntranceJob; import org.apache.linkis.entrance.log.FlexibleErrorCodeManager; +import org.apache.linkis.governance.common.conf.GovernanceCommonConf; import org.apache.linkis.governance.common.entity.job.JobRequest; +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext; +import org.apache.linkis.manager.label.entity.Label; +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel; import org.apache.linkis.protocol.engine.JobProgressInfo; +import org.apache.linkis.protocol.utils.TaskUtils; import org.apache.linkis.scheduler.executer.OutputExecuteResponse; import org.apache.linkis.scheduler.queue.Job; +import org.apache.commons.lang3.StringUtils; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + import scala.Option; import scala.Tuple2; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary.EXEC_FAILED_TO_RETRY; + public class QueryPersistenceManager extends PersistenceManager { private static final Logger logger = LoggerFactory.getLogger(QueryPersistenceManager.class); @@ -103,9 +120,16 @@ public void onProgressUpdate(Job job, float progress, JobProgressInfo[] progress } catch (Exception e) { logger.warn("Invalid progress : " + entranceJob.getJobRequest().getProgress(), e); } + boolean notUpdate = false; if (job.getProgress() >= 0 && persistedProgress >= updatedProgress && entranceJob.getUpdateMetrisFlag()) { + notUpdate = true; + if 
(EntranceConfiguration.TASK_RETRY_ENABLED() && updatedProgress == 0) { // NOSONAR + notUpdate = false; + } + } + if (notUpdate) { return; } if (updatedProgress > 1) { @@ -118,6 +142,119 @@ public void onProgressUpdate(Job job, float progress, JobProgressInfo[] progress updateJobStatus(job); } + @Override + public boolean onJobFailed( + Job job, String code, Map props, int errorCode, String errorDesc) { + if (!EntranceConfiguration.TASK_RETRY_ENABLED()) { + return false; + } + + if (!(job instanceof EntranceJob)) { + return false; + } + + boolean containsAny = false; + String errorDescArray = EntranceConfiguration.SUPPORTED_RETRY_ERROR_DESC(); + String errorCodeArray = EntranceConfiguration.SUPPORTED_RETRY_ERROR_CODES(); + for (String keyword : errorDescArray.split(",")) { + if (errorDesc.contains(keyword.trim()) || errorCodeArray.contains(errorCode + "")) { + containsAny = true; + break; + } + } + + if (!containsAny) { + return false; + } + + AtomicBoolean canRetry = new AtomicBoolean(false); + String aiSqlKey = EntranceConfiguration.AI_SQL_KEY().key(); + String retryNumKey = EntranceConfiguration.RETRY_NUM_KEY().key(); + + final EntranceJob entranceJob = (EntranceJob) job; + + // 处理广播表 + String dataFrameKey = EntranceConfiguration.SUPPORT_ADD_RETRY_CODE_KEYS(); + if (containsAny(errorDesc, dataFrameKey)) { + entranceJob + .getJobRequest() + .setExecutionCode("set spark.sql.autoBroadcastJoinThreshold=-1; " + code); + } + + Map startupMap = TaskUtils.getStartupMap(props); + // 只对 aiSql 做重试 + if ("true".equals(startupMap.get(aiSqlKey))) { + LinkisUtils.tryAndWarn( + () -> { + int retryNum = (int) startupMap.getOrDefault(retryNumKey, 1); + boolean canRetryCode = canRetryCode(code); + if (retryNum > 0 && canRetryCode) { + logger.info( + "mark task: {} status to WaitForRetry, current retryNum: {}, for errorCode: {}, errorDesc: {}", + entranceJob.getJobInfo().getId(), + retryNum, + errorCode, + errorDesc); + // 重试 + job.transitionWaitForRetry(); + + // 修改错误码和错误描述 + 
entranceJob.getJobRequest().setErrorCode(EXEC_FAILED_TO_RETRY.getErrorCode()); + entranceJob.getJobRequest().setErrorDesc(EXEC_FAILED_TO_RETRY.getErrorDesc()); + canRetry.set(true); + startupMap.put(retryNumKey, retryNum - 1); + // once 引擎 + if ((boolean) EntranceConfiguration.AI_SQL_RETRY_ONCE().getValue()) { + // once 引擎 + ExecuteOnceLabel onceLabel = + LabelBuilderFactoryContext.getLabelBuilderFactory() + .createLabel(ExecuteOnceLabel.class); + List> labels = entranceJob.getJobRequest().getLabels(); + labels.add(onceLabel); + logger.info("aisql retry add once label for task id:{}", job.getJobInfo().getId()); + startupMap.put("executeOnce", true); + } + TaskUtils.addStartupMap(props, startupMap); + logger.info("task {} set retry status success.", entranceJob.getJobInfo().getId()); + } else { + logger.info("task {} not support retry.", entranceJob.getJobInfo().getId()); + } + }, + logger); + } + return canRetry.get(); + } + + private boolean canRetryCode(String code) { + String exceptCode = EntranceConfiguration.UNSUPPORTED_RETRY_CODES(); + String[] keywords = exceptCode.split(","); + for (String keyword : keywords) { + // 使用空格分割关键字,并移除空字符串 + String[] parts = keyword.trim().split("\\s+"); + StringBuilder regexBuilder = new StringBuilder("\\s*"); + for (String part : parts) { + regexBuilder.append(part); + regexBuilder.append("\\s*"); + } + if (keyword.startsWith("CREATE")) { + regexBuilder.delete(regexBuilder.length() - 3, regexBuilder.length()); + regexBuilder.append("\\b(?!\\s+IF\\s+NOT\\s+EXISTS)"); + } + if (keyword.startsWith("DROP")) { + regexBuilder.delete(regexBuilder.length() - 3, regexBuilder.length()); + regexBuilder.append("\\b(?!\\s+IF\\s+EXISTS)"); + } + + String regex = regexBuilder.toString(); + Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + Matcher matcher = pattern.matcher(code); + if (matcher.find()) { + return false; + } + } + return true; + } + @Override public void onJobScheduled(Job job) { updateJobStatus(job); @@ 
-163,12 +300,18 @@ public void onJobCompleted(Job job) { } cliHeartbeatMonitor.unRegisterIfCliJob(job); updateJobStatus(job); + job.clear(); } private void updateJobStatus(Job job) { JobRequest jobRequest = null; if (job.isCompleted()) { job.setProgress(1); + } else if (job.getProgress() >= 1 && job instanceof EntranceJob) { + job.setProgress(GovernanceCommonConf.FAKE_PROGRESS()); + ((EntranceJob) job) + .getJobRequest() + .setProgress(String.valueOf(GovernanceCommonConf.FAKE_PROGRESS())); } try { jobRequest = this.entranceContext.getOrCreateEntranceParser().parseToJobRequest(job); @@ -184,10 +327,29 @@ private void updateJobStatus(Job job) { createPersistenceEngine().updateIfNeeded(jobRequest); } catch (ErrorException e) { entranceContext.getOrCreateLogManager().onLogUpdate(job, e.getMessage()); - logger.error("update job status failed, reason: ", e); + throw e; } } @Override public void onResultSizeCreated(Job job, int resultSize) {} + + private static boolean containsAny(String src, String target) { + if (StringUtils.isBlank(target)) { + return false; + } + return containsAny(src, target.split(",")); + } + + private static boolean containsAny(String src, String[] target) { + if (target == null || StringUtils.isBlank(src)) { + return false; + } + for (String item : target) { + if (src.toLowerCase().contains(item.toLowerCase())) { + return true; + } + } + return false; + } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java new file mode 100644 index 00000000000..424e7ca1708 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.restful; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.entrance.EntranceServer; +import org.apache.linkis.scheduler.queue.ConsumerManager; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Api(tags = "entrance lable manager") +@RestController +@RequestMapping(path = "/entrance/operation/consumer") +public class EntranceConsumerRestfulApi { + + private EntranceServer entranceServer; + + private static final Logger logger = LoggerFactory.getLogger(EntranceConsumerRestfulApi.class); + + @Autowired + public void setEntranceServer(EntranceServer entranceServer) { + this.entranceServer = entranceServer; + } + + @ApiOperation(value = "kill-consumer", notes = 
"kill consumer", response = Message.class) + @RequestMapping(path = "/kill", method = RequestMethod.GET) + public Message killConsumer( + HttpServletRequest req, @RequestParam(value = "groupName") String groupName) { + String operationUser = ModuleUserUtils.getOperationUser(req, "kill consumer"); + if (Configuration.isNotAdmin(operationUser)) { + return Message.error("only admin can do this"); + } + logger.info("user {} to kill consumer {}", operationUser, groupName); + ConsumerManager consumerManager = + entranceServer + .getEntranceContext() + .getOrCreateScheduler() + .getSchedulerContext() + .getOrCreateConsumerManager(); + consumerManager.destroyConsumer(groupName); + logger.info("user {} finished to kill consumer {}", operationUser, groupName); + return Message.ok(); + } + + @ApiOperation(value = "consumer-info", notes = "list consumers info", response = Message.class) + @RequestMapping(path = "/info", method = RequestMethod.GET) + public Message countConsumer(HttpServletRequest req) { + String operationUser = ModuleUserUtils.getOperationUser(req, "kill consumer"); + if (Configuration.isNotAdmin(operationUser)) { + return Message.error("only admin can do this"); + } + ConsumerManager consumerManager = + entranceServer + .getEntranceContext() + .getOrCreateScheduler() + .getSchedulerContext() + .getOrCreateConsumerManager(); + return Message.ok().data("consumerNum", consumerManager.listConsumers().length); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java index 2ab457747cf..2c5fce26429 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java +++ 
b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java @@ -17,20 +17,27 @@ package org.apache.linkis.entrance.restful; +import org.apache.linkis.DataWorkCloudApplication; +import org.apache.linkis.common.ServiceInstance; import org.apache.linkis.common.conf.Configuration; import org.apache.linkis.instance.label.client.InstanceLabelClient; import org.apache.linkis.manager.label.constant.LabelKeyConstant; import org.apache.linkis.manager.label.constant.LabelValueConstant; +import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.protocol.label.InsLabelRefreshRequest; +import org.apache.linkis.protocol.label.InsLabelRemoveRequest; import org.apache.linkis.rpc.Sender; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; +import org.apache.commons.collections.CollectionUtils; + import org.springframework.web.bind.annotation.*; import javax.servlet.http.HttpServletRequest; import java.util.HashMap; +import java.util.List; import java.util.Map; import com.fasterxml.jackson.databind.JsonNode; @@ -47,6 +54,8 @@ public class EntranceLabelRestfulApi { private static final Logger logger = LoggerFactory.getLogger(EntranceLabelRestfulApi.class); + private static Boolean offlineFlag = false; + @ApiOperation(value = "update", notes = "update route label", response = Message.class) @ApiOperationSupport(ignoreParameters = {"jsonNode"}) @RequestMapping(path = "/update", method = RequestMethod.POST) @@ -72,13 +81,57 @@ public Message updateRouteLabel(HttpServletRequest req, @RequestBody JsonNode js public Message updateRouteLabel(HttpServletRequest req) { ModuleUserUtils.getOperationUser(req, "markoffline"); Map labels = new HashMap(); - logger.info("Prepare to modify the routelabel of entry to offline"); + logger.info("Prepare to modify the routelabel of entrance to offline"); labels.put(LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE); 
InsLabelRefreshRequest insLabelRefreshRequest = new InsLabelRefreshRequest(); insLabelRefreshRequest.setLabels(labels); insLabelRefreshRequest.setServiceInstance(Sender.getThisServiceInstance()); InstanceLabelClient.getInstance().refreshLabelsToInstance(insLabelRefreshRequest); + synchronized (offlineFlag) { // NOSONAR + offlineFlag = true; + } logger.info("Finished to modify the routelabel of entry to offline"); return Message.ok(); } + + @ApiOperation( + value = "backonline", + notes = "from offline status to recover", + response = Message.class) + @RequestMapping(path = "/backonline", method = RequestMethod.GET) + public Message backOnline(HttpServletRequest req) { + ModuleUserUtils.getOperationUser(req, "backonline"); + logger.info("Prepare to modify the routelabel of entrance to remove offline"); + InsLabelRemoveRequest insLabelRemoveRequest = new InsLabelRemoveRequest(); + insLabelRemoveRequest.setServiceInstance(Sender.getThisServiceInstance()); + InstanceLabelClient.getInstance().removeLabelsFromInstance(insLabelRemoveRequest); + synchronized (offlineFlag) { // NOSONAR + offlineFlag = false; + } + logger.info("Finished to backonline"); + return Message.ok(); + } + + @ApiOperation(value = "isOnline", notes = "entrance isOnline", response = Message.class) + @RequestMapping(path = "/isOnline", method = RequestMethod.GET) + public Message isOnline(HttpServletRequest req) { + String thisInstance = Sender.getThisInstance(); + ServiceInstance mainInstance = DataWorkCloudApplication.getServiceInstance(); + ServiceInstance serviceInstance = new ServiceInstance(); + serviceInstance.setApplicationName(mainInstance.getApplicationName()); + serviceInstance.setInstance(thisInstance); + List> labelFromInstance = + InstanceLabelClient.getInstance().getLabelFromInstance(serviceInstance); + boolean res = true; + String offline = "offline"; + if (!CollectionUtils.isEmpty(labelFromInstance)) { + for (Label label : labelFromInstance) { + if (offline.equals(label.getValue())) { 
+ res = false; + } + } + } + logger.info("Whether Entrance is online: {}", res); + return Message.ok().data("isOnline", res); + } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java index 7d36df8fece..0ffde329f9d 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java @@ -20,8 +20,7 @@ import org.apache.linkis.common.conf.Configuration; import org.apache.linkis.entrance.EntranceServer; import org.apache.linkis.entrance.execute.EntranceJob; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.manager.label.utils.LabelUtil; +import org.apache.linkis.entrance.scheduler.CreatorECTypeDefaultConf; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; @@ -67,7 +66,7 @@ public Message taskinfo( HttpServletRequest req, @RequestParam(value = "user", required = false) String user, @RequestParam(value = "creator", required = false) String creator, - @RequestParam(value = "engineTypeLabel", required = false) String engineTypeLabelValue) { + @RequestParam(value = "ecType", required = false) String ecType) { String userName = ModuleUserUtils.getOperationUser(req, "taskinfo"); String queryUser = user; if (Configuration.isNotAdmin(userName)) { @@ -83,23 +82,12 @@ public Message taskinfo( } else if (StringUtils.isBlank(creator)) { filterWords = queryUser; } - EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(filterWords); - int taskNumber = 0; + EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(filterWords, ecType); int runningNumber = 0; int queuedNumber = 
0; if (null != undoneTasks) { for (EntranceJob task : undoneTasks) { - if (StringUtils.isNotBlank(engineTypeLabelValue)) { - EngineTypeLabel engineTypeLabel = - LabelUtil.getEngineTypeLabel(task.getJobRequest().getLabels()); - // Task types do not match, do not count - if (null == engineTypeLabel - || !engineTypeLabelValue.equalsIgnoreCase(engineTypeLabel.getStringValue())) { - continue; - } - } - taskNumber++; if (task.isRunning()) { runningNumber++; } else { @@ -107,17 +95,26 @@ public Message taskinfo( } } } - return Message.ok("success") - .data("taskNumber", taskNumber) - .data("runningNumber", runningNumber) - .data("queuedNumber", queuedNumber); + assert undoneTasks != null; + Message resp = + Message.ok("success") + .data("taskNumber", undoneTasks.length) + .data("runningNumber", runningNumber) + .data("queuedNumber", queuedNumber); + if (StringUtils.isNoneBlank(creator, ecType)) { + int creatorECTypeMaxRunningJobs = + CreatorECTypeDefaultConf.getCreatorECTypeMaxRunningJobs(creator, ecType); + resp.data("creatorECTypeMaxRunningJobs", creatorECTypeMaxRunningJobs); + resp.data("limitExceeded", runningNumber > creatorECTypeMaxRunningJobs); + } + return resp; } - @ApiOperation(value = "Status", notes = "get running task number ", response = Message.class) + @ApiOperation(value = "runningtask", notes = "get running task number ", response = Message.class) @RequestMapping(path = "/runningtask", method = RequestMethod.GET) - public Message status(HttpServletRequest req) { + public Message runningtask(HttpServletRequest req) { ModuleUserUtils.getOperationUser(req, "runningtask"); - EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(""); + EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask("", null); Boolean isCompleted = false; if (null == undoneTasks || undoneTasks.length < 1) { isCompleted = true; diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java 
b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java index a5d3ace35fe..516af6e63a2 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java @@ -38,6 +38,7 @@ import org.apache.linkis.server.Message; import org.apache.linkis.server.security.SecurityFilter; import org.apache.linkis.server.utils.ModuleUserUtils; +import org.apache.linkis.utils.LinkisSpringUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; @@ -96,7 +97,11 @@ public void setEntranceServer(EntranceServer entranceServer) { @RequestMapping(path = "/execute", method = RequestMethod.POST) public Message execute(HttpServletRequest req, @RequestBody Map json) { Message message = null; - logger.info("Begin to get an execID"); + String operationUser = ModuleUserUtils.getOperationUser(req); + logger.info( + "Begin to get execute task for user {}, Client IP {}", + operationUser, + LinkisSpringUtils.getClientIP(req)); json.put(TaskConstant.EXECUTE_USER, ModuleUserUtils.getOperationUser(req)); json.put(TaskConstant.SUBMIT_USER, SecurityFilter.getLoginUsername(req)); HashMap map = (HashMap) json.get(TaskConstant.SOURCE); @@ -110,15 +115,6 @@ public Message execute(HttpServletRequest req, @RequestBody Map JobRequest jobReq = ((EntranceJob) job).getJobRequest(); Long jobReqId = jobReq.getId(); ModuleUserUtils.getOperationUser(req, "execute task,id: " + jobReqId); - pushLog( - LogUtils.generateInfo( - "You have submitted a new job, script code (after variable substitution) is"), - job); - pushLog( - "************************************SCRIPT CODE************************************", job); - pushLog(jobReq.getExecutionCode(), job); - pushLog( - "************************************SCRIPT 
CODE************************************", job); String execID = ZuulEntranceUtils.generateExecID( job.getId(), @@ -132,7 +128,7 @@ public Message execute(HttpServletRequest req, @RequestBody Map + jobReqId + " in " + Sender.getThisServiceInstance().toString() - + ". Please wait it to be scheduled"), + + ". \n Please wait it to be scheduled(您的任务已经提交,进入排队中,如果一直没有更新日志,是任务并发达到了限制,可以在ITSM提Linkis参数修改单)"), job); message = Message.ok(); message.setMethod("/api/entrance/execute"); @@ -148,9 +144,25 @@ public Message execute(HttpServletRequest req, @RequestBody Map @RequestMapping(path = "/submit", method = RequestMethod.POST) public Message submit(HttpServletRequest req, @RequestBody Map json) { Message message = null; - logger.info("Begin to get an execID"); - json.put(TaskConstant.EXECUTE_USER, ModuleUserUtils.getOperationUser(req)); + String executeUser = ModuleUserUtils.getOperationUser(req); + logger.info( + "Begin to get execute task for user {}, Client IP {}", + executeUser, + LinkisSpringUtils.getClientIP(req)); json.put(TaskConstant.SUBMIT_USER, SecurityFilter.getLoginUsername(req)); + String token = ModuleUserUtils.getToken(req); + Object tempExecuteUser = json.get(TaskConstant.EXECUTE_USER); + // check special admin token + if (StringUtils.isNotBlank(token) && tempExecuteUser != null) { + if (Configuration.isAdminToken(token)) { + logger.warn( + "ExecuteUser variable will be replaced by system value: {} -> {}", + tempExecuteUser, + executeUser); + executeUser = String.valueOf(tempExecuteUser); + } + } + json.put(TaskConstant.EXECUTE_USER, executeUser); HashMap map = (HashMap) json.get(TaskConstant.SOURCE); if (map == null) { map = new HashMap<>(); @@ -162,15 +174,6 @@ public Message submit(HttpServletRequest req, @RequestBody Map j JobRequest jobRequest = ((EntranceJob) job).getJobRequest(); Long jobReqId = jobRequest.getId(); ModuleUserUtils.getOperationUser(req, "submit jobReqId: " + jobReqId); - pushLog( - LogUtils.generateInfo( - "You have submitted a new 
job, script code (after variable substitution) is"), - job); - pushLog( - "************************************SCRIPT CODE************************************", job); - pushLog(jobRequest.getExecutionCode(), job); - pushLog( - "************************************SCRIPT CODE************************************", job); pushLog( LogUtils.generateInfo( "Your job is accepted, jobID is " @@ -179,7 +182,7 @@ public Message submit(HttpServletRequest req, @RequestBody Map j + jobReqId + " in " + Sender.getThisServiceInstance().toString() - + ". Please wait it to be scheduled"), + + ". \n Please wait it to be scheduled(您的任务已经提交,进入排队中,如果一直没有更新日志,是任务并发达到了限制,可以在ITSM提Linkis参数修改单)"), job); String execID = ZuulEntranceUtils.generateExecID( @@ -326,7 +329,7 @@ public Message progressWithResource(HttpServletRequest req, @PathVariable("id") JobRequest jobRequest = ((EntranceJob) job.get()).getJobRequest(); Map metrics = jobRequest.getMetrics(); Map metricsVo = new HashMap<>(); - if (metrics.containsKey(TaskConstant.JOB_YARNRESOURCE)) { + if (null != metrics && metrics.containsKey(TaskConstant.JOB_YARNRESOURCE)) { HashMap resourceMap = (HashMap) metrics.get(TaskConstant.JOB_YARNRESOURCE); ArrayList resoureList = new ArrayList<>(12); @@ -594,9 +597,9 @@ public Message killJobs( logger.error("kill job {} failed ", job.get().getId(), t); message = Message.error( - "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)"); + "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)", + t); message.setMethod("/api/entrance/" + id + "/kill"); - message.setStatus(1); } } messages.add(message); @@ -678,7 +681,9 @@ public Message kill( logger.error("kill job {} failed ", job.get().getId(), t); message = Message.error( - "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)"); + "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败) with error:" + + t.getMessage(), + t); 
message.setMethod("/api/entrance/" + id + "/kill"); message.setStatus(1); } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java new file mode 100644 index 00000000000..5a91c71a110 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.scheduler; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.entrance.conf.EntranceConfiguration; +import org.apache.linkis.entrance.utils.EntranceUtils; +import org.apache.linkis.governance.common.protocol.conf.RequestQueryEngineConfig; +import org.apache.linkis.governance.common.protocol.conf.ResponseQueryConfig; +import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; +import org.apache.linkis.rpc.Sender; + +import org.apache.commons.lang3.StringUtils; + +import java.util.concurrent.TimeUnit; + +import scala.Tuple2; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class CreatorECTypeDefaultConf { + + private static final Logger logger = LoggerFactory.getLogger(CreatorECTypeDefaultConf.class); + + public static Sender confSender = + Sender.getSender( + Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME().getValue()); + + private static LoadingCache confCache = + CacheBuilder.newBuilder() + .maximumSize(1000) + .expireAfterWrite( + (long) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT_CONF_CACHE().getValue(), + TimeUnit.MINUTES) + .build( + new CacheLoader() { + @Override + public Integer load(String key) throws Exception { + Tuple2 tuple2 = + EntranceUtils.fromKeyGetLabels(key); + RequestQueryEngineConfig requestQueryEngineConfig = + new RequestQueryEngineConfig(tuple2._1, tuple2._2(), null); + int jobLimit = + (int) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT().getValue(); + try { + Object response = confSender.ask(requestQueryEngineConfig); + if (response instanceof ResponseQueryConfig) { + jobLimit = + (int) + EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT() + .getValue(((ResponseQueryConfig) response).getKeyAndValue()); + 
} + } catch (Exception e) { + logger.warn("Failed to get key {} from conf", key, e); + } + return jobLimit; + } + }); + + public static int getCreatorECTypeMaxRunningJobs(String creator, String ecType) { + int jobLimit = (int) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT().getValue(); + if (StringUtils.isNoneBlank(creator, ecType)) { + try { + String key = EntranceUtils.getDefaultCreatorECTypeKey(creator, ecType); + jobLimit = confCache.get(key); + } catch (Exception e) { + logger.warn("Failed to get key creator {} ecType {} from cache", creator, ecType, e); + } + } + int entranceNumber = EntranceUtils.getRunningEntranceNumber(); + return jobLimit / entranceNumber; + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.java new file mode 100644 index 00000000000..ac180d1aa30 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.scheduler; + +import org.apache.linkis.scheduler.SchedulerContext; +import org.apache.linkis.scheduler.queue.Consumer; +import org.apache.linkis.scheduler.queue.Group; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer; + +import java.util.concurrent.ExecutorService; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class EntranceFIFOUserConsumer extends FIFOUserConsumer { + + private static final Logger logger = LoggerFactory.getLogger(EntranceFIFOUserConsumer.class); + + public EntranceFIFOUserConsumer( + SchedulerContext schedulerContext, ExecutorService executeService, Group group) { + super(schedulerContext, executeService, group); + } + + @Override + public boolean runScheduleIntercept() { + Consumer[] consumers = getSchedulerContext().getOrCreateConsumerManager().listConsumers(); + int creatorRunningJobNum = 0; + + // APP_TEST_hadoop_hive or IDE_hadoop_hive + String groupNameStr = getGroup().getGroupName(); + String[] groupNames = groupNameStr.split("_"); + int length = groupNames.length; + if (length < 3) { + return true; + } + + // APP_TEST + int lastIndex = groupNameStr.lastIndexOf("_"); + int secondLastIndex = groupNameStr.lastIndexOf("_", lastIndex - 1); + String creatorName = groupNameStr.substring(0, secondLastIndex); + + // hive + String ecType = groupNames[length - 1]; + + for (Consumer consumer : consumers) { + String groupName = consumer.getGroup().getGroupName(); + if (groupName.startsWith(creatorName) && groupName.endsWith(ecType)) { + creatorRunningJobNum += consumer.getRunningEvents().length; + } + } + + int creatorECTypeMaxRunningJobs = + CreatorECTypeDefaultConf.getCreatorECTypeMaxRunningJobs(creatorName, ecType); + + if (logger.isDebugEnabled()) { + logger.debug( + "Creator: {} EC: {} there are currently:{} jobs running and maximum limit: {}", + creatorName, + ecType, + creatorRunningJobNum, + creatorECTypeMaxRunningJobs); + } + + if (creatorRunningJobNum > 
creatorECTypeMaxRunningJobs) { + logger.error( + "Creator: {} EC: {} there are currently:{} jobs running that exceed the maximum limit: {}", + creatorName, + ecType, + creatorRunningJobNum, + creatorECTypeMaxRunningJobs); + return false; + } else { + return true; + } + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.java new file mode 100644 index 00000000000..98f0929ee9e --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.scheduler; + +import org.apache.linkis.scheduler.queue.Group; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer; +import org.apache.linkis.scheduler.queue.parallelqueue.ParallelConsumerManager; + +public class EntranceParallelConsumerManager extends ParallelConsumerManager { + + public EntranceParallelConsumerManager(int maxParallelismUsers, String schedulerName) { + super(maxParallelismUsers, schedulerName); + } + + @Override + public FIFOUserConsumer createConsumer(String groupName) { + Group group = getSchedulerContext().getOrCreateGroupFactory().getGroup(groupName); + return new EntranceFIFOUserConsumer(getSchedulerContext(), getOrCreateExecutorService(), group); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java index 4301334965a..2ad7a1a3afa 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java @@ -18,15 +18,20 @@ package org.apache.linkis.entrance.server; import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.common.conf.Configuration$; import org.apache.linkis.entrance.EntranceContext; import org.apache.linkis.entrance.EntranceServer; +import org.apache.linkis.entrance.conf.EntranceConfiguration; import org.apache.linkis.entrance.conf.EntranceConfiguration$; import org.apache.linkis.entrance.constant.ServiceNameConsts; import org.apache.linkis.entrance.execute.EntranceJob; +import org.apache.linkis.entrance.job.EntranceExecutionJob; import org.apache.linkis.entrance.log.LogReader; import org.apache.linkis.governance.common.protocol.conf.EntranceInstanceConfRequest; 
import org.apache.linkis.rpc.Sender; +import org.apache.commons.io.IOUtils; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.event.ContextClosedEvent; import org.springframework.context.event.EventListener; @@ -63,9 +68,9 @@ public void init() { private void cleanUpEntranceDirtyData() { if ((Boolean) EntranceConfiguration$.MODULE$.ENABLE_ENTRANCE_DIRTY_DATA_CLEAR().getValue()) { + logger.info("start to clean up entrance dirty data."); Sender sender = - Sender.getSender( - EntranceConfiguration$.MODULE$.JOBHISTORY_SPRING_APPLICATION_NAME().getValue()); + Sender.getSender(Configuration$.MODULE$.JOBHISTORY_SPRING_APPLICATION_NAME().getValue()); ServiceInstance thisServiceInstance = Sender.getThisServiceInstance(); sender.ask(new EntranceInstanceConfRequest(thisServiceInstance.getInstance())); } @@ -93,12 +98,19 @@ private void shutdownEntrance(ContextClosedEvent event) { if (shutdownFlag) { logger.warn("event has been handled"); } else { + if (EntranceConfiguration.ENTRANCE_SHUTDOWN_FAILOVER_CONSUME_QUEUE_ENABLED()) { + logger.warn("Entrance exit to update and clean all ConsumeQueue task instances"); + // updateAllNotExecutionTaskInstances(false); + } + logger.warn("Entrance exit to stop all job"); - EntranceJob[] allUndoneJobs = getAllUndoneTask(null); - if (null != allUndoneJobs) { - for (EntranceJob job : allUndoneJobs) { + EntranceJob[] allUndoneTask = getAllUndoneTask(null, null); + if (null != allUndoneTask) { + for (EntranceJob job : allUndoneTask) { job.onFailure( - "Entrance exits the automatic cleanup task and can be rerun(服务退出自动清理任务,可以重跑)", null); + "Your job will be marked as canceled because the Entrance service restarted(因为Entrance服务重启,您的任务将被标记为取消)", + null); + IOUtils.closeQuietly(((EntranceExecutionJob) job).getLogWriter().get()); } } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala index f298e54251d..27c362885fd 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala @@ -20,6 +20,7 @@ package org.apache.linkis.entrance import org.apache.linkis.common.exception.{ErrorException, LinkisException, LinkisRuntimeException} import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.cs.CSEntranceHelper import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorException, SubmitFailedException} @@ -28,8 +29,15 @@ import org.apache.linkis.entrance.log.LogReader import org.apache.linkis.entrance.timeout.JobTimeoutManager import org.apache.linkis.entrance.utils.JobHistoryHelper import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.utils.LoggerUtils import org.apache.linkis.protocol.constants.TaskConstant +import org.apache.linkis.protocol.utils.TaskUtils import org.apache.linkis.rpc.Sender +import org.apache.linkis.scheduler.conf.SchedulerConfiguration.{ + ENGINE_PRIORITY_RUNTIME_KEY, + FIFO_QUEUE_STRATEGY, + PFIFO_SCHEDULER_STRATEGY +} import org.apache.linkis.scheduler.queue.{Job, SchedulerEventState} import org.apache.linkis.server.conf.ServerConfiguration @@ -38,6 +46,7 @@ import org.apache.commons.lang3.exception.ExceptionUtils import java.text.MessageFormat import java.util +import java.util.concurrent.TimeUnit abstract class EntranceServer extends Logging { @@ -45,6 +54,8 @@ abstract class EntranceServer extends Logging { private val jobTimeoutManager: JobTimeoutManager = new JobTimeoutManager() + 
private val timeoutCheck = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue + def init(): Unit def getName: String @@ -78,12 +89,15 @@ abstract class EntranceServer extends Logging { } logger.info(s"received a request,convert $jobRequest") + LoggerUtils.setJobIdMDC(jobRequest.getId.toString) + val logAppender = new java.lang.StringBuilder() Utils.tryThrow( getEntranceContext .getOrCreateEntranceInterceptors() .foreach(int => jobRequest = int.apply(jobRequest, logAppender)) ) { t => + LoggerUtils.removeJobIdMDC() val error = t match { case error: ErrorException => error case t1: Throwable => @@ -131,11 +145,52 @@ abstract class EntranceServer extends Logging { job.setLogListener(getEntranceContext.getOrCreateLogManager()) job.setProgressListener(getEntranceContext.getOrCreatePersistenceManager()) job.setJobListener(getEntranceContext.getOrCreatePersistenceManager()) + job.setJobRetryListener(getEntranceContext.getOrCreatePersistenceManager()) job match { case entranceJob: EntranceJob => entranceJob.setEntranceListenerBus(getEntranceContext.getOrCreateEventListenerBus) case _ => } + + /** + * job.afterStateChanged() method is only called in job.run(), and job.run() is called only + * after job is scheduled so it suggest that we lack a hook for job init, currently we call + * this to trigger JobListener.onJobinit() + */ + Utils.tryAndWarn(job.getJobListener.foreach(_.onJobInited(job))) + if (logger.isDebugEnabled()) { + logger.debug( + s"After code preprocessing, the real execution code is:${jobRequest.getExecutionCode}" + ) + } + if (StringUtils.isBlank(jobRequest.getExecutionCode)) { + throw new SubmitFailedException( + SUBMIT_CODE_ISEMPTY.getErrorCode, + SUBMIT_CODE_ISEMPTY.getErrorDesc + ) + } + + Utils.tryAndWarn { + // 如果是使用优先级队列,设置下优先级 + val configMap = params + .getOrDefault(TaskConstant.PARAMS, new util.HashMap[String, AnyRef]()) + .asInstanceOf[util.Map[String, AnyRef]] + val properties: util.Map[String, AnyRef] = TaskUtils.getRuntimeMap(configMap) + 
val fifoStrategy: String = FIFO_QUEUE_STRATEGY + if ( + PFIFO_SCHEDULER_STRATEGY.equalsIgnoreCase( + fifoStrategy + ) && properties != null && !properties.isEmpty + ) { + val priorityValue: AnyRef = properties.get(ENGINE_PRIORITY_RUNTIME_KEY) + if (priorityValue != null) { + val value: Int = getPriority(priorityValue.toString) + logAppender.append(LogUtils.generateInfo(s"The task set priority is ${value} \n")) + job.setPriority(value) + } + } + } + Utils.tryCatch { if (logAppender.length() > 0) { job.getLogListener.foreach(_.onLogUpdate(job, logAppender.toString.trim)) @@ -144,12 +199,6 @@ abstract class EntranceServer extends Logging { logger.error("Failed to write init log, reason: ", t) } - /** - * job.afterStateChanged() method is only called in job.run(), and job.run() is called only - * after job is scheduled so it suggest that we lack a hook for job init, currently we call - * this to trigger JobListener.onJobinit() - */ - Utils.tryAndWarn(job.getJobListener.foreach(_.onJobInited(job))) getEntranceContext.getOrCreateScheduler().submit(job) val msg = LogUtils.generateInfo( s"Job with jobId : ${jobRequest.getId} and execID : ${job.getId()} submitted " @@ -159,14 +208,16 @@ abstract class EntranceServer extends Logging { job match { case entranceJob: EntranceJob => entranceJob.getJobRequest.setReqId(job.getId()) - if (jobTimeoutManager.timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) { + if (timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) { jobTimeoutManager.add(job.getId(), entranceJob) } entranceJob.getLogListener.foreach(_.onLogUpdate(entranceJob, msg)) case _ => } + LoggerUtils.removeJobIdMDC() job } { t => + LoggerUtils.removeJobIdMDC() job.onFailure("Submitting the query failed!(提交查询失败!)", t) val _jobRequest: JobRequest = getEntranceContext.getOrCreateEntranceParser().parseToJobRequest(job) @@ -206,7 +257,7 @@ abstract class EntranceServer extends Logging { entranceWebSocketService } else None - def 
getAllUndoneTask(filterWords: String): Array[EntranceJob] = { + def getAllUndoneTask(filterWords: String, ecType: String = null): Array[EntranceJob] = { val consumers = getEntranceContext .getOrCreateScheduler() .getSchedulerContext @@ -214,7 +265,14 @@ abstract class EntranceServer extends Logging { .listConsumers() .toSet val filterConsumer = if (StringUtils.isNotBlank(filterWords)) { - consumers.filter(_.getGroup.getGroupName.contains(filterWords)) + if (StringUtils.isNotBlank(ecType)) { + consumers.filter(consumer => + consumer.getGroup.getGroupName.contains(filterWords) && consumer.getGroup.getGroupName + .contains(ecType) + ) + } else { + consumers.filter(_.getGroup.getGroupName.contains(filterWords)) + } } else { consumers } @@ -227,6 +285,57 @@ abstract class EntranceServer extends Logging { .toArray } + /** + * to check timeout task,and kill timeout task timeout: default > 48h + */ + def startTimeOutCheck(): Unit = { + Utils.defaultScheduler.scheduleAtFixedRate( + new Runnable() { + override def run(): Unit = { + Utils.tryCatch { + + val timeoutType = EntranceConfiguration.ENTRANCE_TASK_TIMEOUT.getHotValue() + logger.info(s"Start to check timeout Job, timout is ${timeoutType}") + val timeoutTime = System.currentTimeMillis() - timeoutType.toLong + getAllUndoneTask(null, null).filter(job => job.createTime < timeoutTime).foreach { + job => + job.onFailure(s"Job has run for longer than the maximum time $timeoutType", null) + } + logger.info(s"Finished to check timeout Job, timout is ${timeoutType}") + } { case t: Throwable => + logger.warn(s"TimeoutDetective Job failed. ${t.getMessage}", t) + } + } + + }, + EntranceConfiguration.ENTRANCE_TASK_TIMEOUT_SCAN.getValue.toLong, + EntranceConfiguration.ENTRANCE_TASK_TIMEOUT_SCAN.getValue.toLong, + TimeUnit.MILLISECONDS + ) + } + + if (timeoutCheck) { + logger.info("Job time check is enabled") + startTimeOutCheck() + } + + val DOT = "." 
+ val DEFAULT_PRIORITY = 100 + + private def getPriority(value: String): Int = { + var priority: Int = -1 + Utils.tryAndWarn({ + priority = + if (value.contains(DOT)) value.substring(0, value.indexOf(DOT)).toInt else value.toInt + }) + if (priority < 0 || priority > Integer.MAX_VALUE - 1) { + logger.warn(s"illegal queue priority: ${value}") + DEFAULT_PRIORITY + } else { + priority + } + } + } object EntranceServer { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala index 714b9f0cc2c..375d7c14a72 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala @@ -215,18 +215,6 @@ class EntranceWebSocketService s"Your job's execution code is (after variable substitution and code check) " ) ) - entranceServer.getEntranceContext - .getOrCreateLogManager() - .onLogUpdate( - job, - "************************************SCRIPT CODE************************************" - ) - entranceServer.getEntranceContext - .getOrCreateLogManager() - .onLogUpdate( - job, - "************************************SCRIPT CODE************************************" - ) entranceServer.getEntranceContext .getOrCreateLogManager() .onLogUpdate( @@ -376,13 +364,8 @@ class EntranceWebSocketService } private def concatLog(length: Int, log: String, flag: StringBuilder, all: StringBuilder): Unit = { - if (length == 1) { - flag ++= log ++= "\n" - all ++= log ++= "\n" - } else { - flag ++= log ++= "\n" - all ++= log ++= "\n" - } + flag ++= log ++= "\n" + all ++= log ++= "\n" } /** diff --git 
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cache/GlobalConfigurationKeyValueCache.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cache/GlobalConfigurationKeyValueCache.scala deleted file mode 100644 index d4190d6ca4e..00000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cache/GlobalConfigurationKeyValueCache.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.entrance.cache - -import org.apache.linkis.common.conf.Configuration -import org.apache.linkis.governance.common.entity.job.JobRequest -import org.apache.linkis.governance.common.protocol.conf.{ - RequestQueryGlobalConfig, - ResponseQueryConfig -} -import org.apache.linkis.protocol.CacheableProtocol -import org.apache.linkis.rpc.RPCMapCache - -import java.util - -object GlobalConfigurationKeyValueCache - extends RPCMapCache[JobRequest, String, String]( - Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue - ) { - - override protected def createRequest(jobReq: JobRequest): CacheableProtocol = - RequestQueryGlobalConfig(jobReq.getExecuteUser) - - override protected def createMap(any: Any): util.Map[String, String] = any match { - case response: ResponseQueryConfig => response.getKeyAndValue - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala index 5c61ce0b3b1..c129c37a8df 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala @@ -30,20 +30,6 @@ object EntranceConfiguration { val JOB_MAX_PERSIST_WAIT_TIME = CommonVars("wds.linkis.entrance.job.persist.wait.max", new TimeType("5m")) - val MULTI_ENTRANCE_CONDITION = CommonVars("wds.linkis.entrance.multi.entrance.flag", true) - - val JOBHISTORY_SPRING_APPLICATION_NAME = - CommonVars("wds.linkis.jobhistory.application.name", "linkis-ps-jobhistory") - - /** - * DEFAULT_LOGPATH_PREFIX is the prefix that represents the default log storage path - * DEFAULT_LOGPATH_PREFIX 是表示默认的日志存储路径的前缀 - */ - val DEFAULT_LOGPATH_PREFIX = CommonVars[String]( - 
"wds.linkis.entrance.config.log.path", - CommonVars[String]("wds.linkis.filesystem.hdfs.root.path").getValue - ) - /** * Default_Cache_Max is used to specify the size of the LoopArray of the CacheLogWriter * Default_Cache_Max 是用来指定CacheLogWriter的LoopArray的大小 @@ -96,6 +82,9 @@ object EntranceConfiguration { */ val WDS_LINKIS_INSTANCE = CommonVars("wds.linkis.rm.instance", 10) + val WDS_LINKIS_ENTRANCE_RUNNING_JOB = + CommonVars("wds.linkis.engine.running.job.max", WDS_LINKIS_INSTANCE.getValue) + val ENTRANCE_INSTANCE_MIN = CommonVars("wds.linkis.entrance.runningjob.min", 5) val LOG_EXCLUDE_ALL = CommonVars("wds.linkis.log.exclude.all", "com.netflix") @@ -215,12 +204,206 @@ object EntranceConfiguration { val GROUP_CACHE_EXPIRE_TIME = CommonVars("wds.linkis.consumer.group.expire.time", 50) val CLIENT_MONITOR_CREATOR = - CommonVars("wds.linkis.entrance.client.monitor.creator", "LINKISCLI") + CommonVars("wds.linkis.entrance.client.monitor.creator", "LINKISCLI,BdpClient") val CREATOR_IP_SWITCH = CommonVars("wds.linkis.entrance.user.creator.ip.interceptor.switch", false) - val ENABLE_ENTRANCE_DIRTY_DATA_CLEAR = - CommonVars("linkis.entrance.auto.clean.dirty.data.enable", true) + val TEMPLATE_CONF_SWITCH = + CommonVars("wds.linkis.entrance.template.conf.interceptor.switch", true) + + val TEMPLATE_CONF_ADD_ONCE_LABEL_ENABLE = + CommonVars("wds.linkis.entrance.template.add.once.label.enable", false) + + val SUPPORT_TEMPLATE_CONF_RETRY_ENABLE = + CommonVars("linkis.entrance.template.retry.enable", false) + + val ENABLE_ENTRANCE_DIRTY_DATA_CLEAR: CommonVars[Boolean] = + CommonVars[Boolean]("linkis.entrance.auto.clean.dirty.data.enable", true) + + val ENTRANCE_CREATOR_JOB_LIMIT: CommonVars[Int] = + CommonVars[Int]( + "linkis.entrance.creator.job.concurrency.limit", + 10000, + "Creator task concurrency limit parameters" + ) + + val ENTRANCE_CREATOR_JOB_LIMIT_CONF_CACHE = + CommonVars("linkis.entrance.creator.job.concurrency.limit.conf.cache.time", 30L) + + val 
ENTRANCE_TASK_TIMEOUT = + CommonVars("linkis.entrance.task.timeout", new TimeType("48h")) + + val ENTRANCE_TASK_TIMEOUT_SCAN = + CommonVars("linkis.entrance.task.timeout.scan", new TimeType("12h")) + + val ENABLE_HDFS_JVM_USER = + CommonVars[Boolean]("linkis.entrance.enable.hdfs.jvm.user", true).getValue + + val ENTRANCE_FAILOVER_ENABLED = CommonVars("linkis.entrance.failover.enable", false).getValue + + val ENTRANCE_FAILOVER_SCAN_INIT_TIME = + CommonVars("linkis.entrance.failover.scan.init.time", 3 * 1000).getValue + + val ENTRANCE_FAILOVER_SCAN_INTERVAL = + CommonVars("linkis.entrance.failover.scan.interval", 30 * 1000).getValue + + val ENTRANCE_FAILOVER_DATA_NUM_LIMIT = + CommonVars("linkis.entrance.failover.data.num.limit", 10).getValue + + val ENTRANCE_FAILOVER_DATA_INTERVAL_TIME = + CommonVars("linkis.entrance.failover.data.interval.time", new TimeType("1d").toLong).getValue + + // if true, the waitForRetry job in runningJobs can be failover + val ENTRANCE_FAILOVER_RETRY_JOB_ENABLED = + CommonVars("linkis.entrance.failover.retry.job.enable", false) + + val ENTRANCE_UPDATE_BATCH_SIZE = CommonVars("linkis.entrance.update.batch.size", 100) + + // if true, the job in ConsumeQueue can be failover + val ENTRANCE_SHUTDOWN_FAILOVER_CONSUME_QUEUE_ENABLED = + CommonVars("linkis.entrance.shutdown.failover.consume.queue.enable", false).getValue + + val ENTRANCE_GROUP_SCAN_ENABLED = CommonVars("linkis.entrance.group.scan.enable", false) + + val ENTRANCE_GROUP_SCAN_INIT_TIME = CommonVars("linkis.entrance.group.scan.init.time", 3 * 1000) + + val ENTRANCE_GROUP_SCAN_INTERVAL = CommonVars("linkis.entrance.group.scan.interval", 60 * 1000) + + val ENTRANCE_FAILOVER_RETAIN_METRIC_ENGINE_CONN_ENABLED = + CommonVars("linkis.entrance.failover.retain.metric.engine.conn.enable", false) + + val ENTRANCE_FAILOVER_RETAIN_METRIC_YARN_RESOURCE_ENABLED = + CommonVars("linkis.entrance.failover.retain.metric.yarn.resource.enable", false) + + // if true, job whose status is running will be 
set to Cancelled + val ENTRANCE_FAILOVER_RUNNING_KILL_ENABLED = + CommonVars("linkis.entrance.failover.running.kill.enable", false) + + val LINKIS_ENTRANCE_SKIP_ORCHESTRATOR = + CommonVars("linkis.entrance.skip.orchestrator", false).getValue + + val ENABLE_HDFS_RES_DIR_PRIVATE = + CommonVars[Boolean]("linkis.entrance.enable.hdfs.res.dir.private", false).getValue + + val UNSUPPORTED_RETRY_CODES = + CommonVars("linkis.entrance.unsupported.retry.codes", "NOCODE").getValue + + val SUPPORTED_RETRY_ERROR_CODES = + CommonVars( + "linkis.entrance.supported.retry.error.codes", + "01002,01003,13005,13006,13012" + ).getValue + + val SUPPORTED_RETRY_ERROR_DESC = + CommonVars( + "linkis.entrance.supported.retry.error.desc", + "Spark application has already stopped,Spark application sc has already stopped,Failed to allocate a page,dataFrame to local exception,org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator" + ).getValue + + val SUPPORT_ADD_RETRY_CODE_KEYS = + CommonVars( + "linkis.entrance.supported.add.retry.code.keys", + "dataFrame to local exception,org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator" + ).getValue + + val TASK_RETRY_ENABLED: Boolean = + CommonVars[Boolean]("linkis.task.retry.enabled", true).getValue + + val AI_SQL_DEFAULT_SPARK_ENGINE_TYPE: String = + CommonVars[String]("linkis.ai.sql.default.spark.engine.type", "spark-3.4.4").getValue + + val AI_SQL_DEFAULT_HIVE_ENGINE_TYPE: String = + CommonVars[String]("linkis.ai.sql.default.hive.engine.type", "hive-2.3.3").getValue + + val AI_SQL_HIVE_TEMPLATE_KEYS: String = + CommonVars[String]("linkis.ai.sql.hive.template.keys", "hive,mapreduce").getValue + + val AI_SQL_CREATORS: String = + CommonVars[String]("linkis.ai.sql.support.creators", "IDE,MCP").getValue + + val AI_SQL_KEY: CommonVars[String] = + CommonVars[String]("linkis.ai.sql.enable", "true") + + val RETRY_NUM_KEY: CommonVars[Int] = + CommonVars[Int]("linkis.ai.retry.num", 1) + + val AI_SQL_RETRY_ONCE: CommonVars[Boolean] = + 
CommonVars[Boolean]("linkis.ai.sql.once.enable", true) + + val SPARK3_VERSION_COERCION_USERS: String = + CommonVars[String]("spark.version.coercion.users", "").getHotValue() + + val SPARK3_VERSION_COERCION_DEPARTMENT: String = + CommonVars[String]("spark.version.coercion.department.id", "").getHotValue() + + val SPARK3_VERSION_COERCION_SWITCH: Boolean = + CommonVars[Boolean]("spark.version.coercion.switch", false).getValue + + val PYTHON_SAFE_CHECK_SWITCH = CommonVars("linkis.python.safe.check.switch", false).getValue + + val DOCTOR_URL = CommonVars("linkis.doctor.url", "").getValue + + val DOCTOR_DYNAMIC_ENGINE_URL = CommonVars( + "linkis.aisql.doctor.api", + "/api/v1/external/engine/diagnose?app_id=$app_id×tamp=$timestamp&nonce=$nonce&signature=$signature" + ).getValue + + val DOCTOR_ENCRYPT_SQL_URL = CommonVars( + "linkis.encrypt.doctor.api", + "/api/v1/external/plaintext/diagnose?app_id=$app_id×tamp=$timestamp&nonce=$nonce&signature=$signature" + ).getValue + + val DOCTOR_SIGNATURE_TOKEN = CommonVars("linkis.doctor.signature.token", "").getValue + + val DOCTOR_NONCE = CommonVars.apply("linkis.doctor.signature.nonce", "").getValue + + val LINKIS_SYSTEM_NAME = CommonVars("linkis.system.name", "").getValue + + val DOCTOR_CLUSTER = CommonVars("linkis.aisql.doctor.cluster", "").getValue + + val AI_SQL_DYNAMIC_ENGINE_SWITCH = + CommonVars("linkis.aisql.dynamic.engine.type.switch", false).getValue + + val DOCTOR_REQUEST_TIMEOUT = CommonVars("linkis.aisql.doctor.http.timeout", 30000).getValue + + val DOCTOR_HTTP_MAX_CONNECT = CommonVars("linkis.aisql.doctor.http.max.connect", 20).getValue + + val SPARK_EXECUTOR_CORES = CommonVars.apply("spark.executor.cores", "2"); + + var SPARK_EXECUTOR_MEMORY = CommonVars.apply("spark.executor.memory", "6G"); + + var SPARK_DYNAMIC_ALLOCATION_MAX_EXECUTORS = + CommonVars.apply("spark.dynamicAllocation.maxExecutors", "50"); + + var SPARK_EXECUTOR_INSTANCES = CommonVars.apply("spark.executor.instances", "1"); + + var 
SPARK_EXECUTOR_MEMORY_OVERHEAD = CommonVars.apply("spark.executor.memoryOverhead", "2G"); + + var SPARK3_PYTHON_VERSION = CommonVars.apply("spark.python.version", "python3"); + + var SPARK_DYNAMIC_ALLOCATION_ENABLED = + CommonVars.apply("spark.dynamic.allocation.enabled", false).getValue + + var SPARK_DYNAMIC_ALLOCATION_ADDITIONAL_CONFS = + CommonVars.apply("spark.dynamic.allocation.additional.confs", "").getValue + + var DOCTOR_SENSITIVE_SQL_CHECK_SWITCH = + CommonVars[Boolean]("linkis.doctor.sensitive.sql.check.switch", false).getValue + + var DOCTOR_SENSITIVE_SQL_CHECK_RUNTYPE = + CommonVars[String]("linkis.doctor.sensitive.sql.check.run.Type", "sql,python").getValue + + var DOCTOR_SENSITIVE_SQL_CHECK_CREATOR = + CommonVars[String]("linkis.doctor.sensitive.sql.check.creator", "").getValue + + var DOCTOR_SENSITIVE_SQL_CHECK_DEPARTMENT = + CommonVars[String]("linkis.doctor.sensitive.sql.check.department", "").getValue + + var DOCTOR_SENSITIVE_SQL_CHECK_WHITELIST = + CommonVars[String]("linkis.doctor.sensitive.sql.check.whitelist", "").getValue + + var DOCTOR_SENSITIVE_SQL_CHECK_ENGINETYPE = + CommonVars[String]("linkis.doctor.sensitive.sql.check.engine.type", "hive,spark").getValue } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala index c509f100563..b63734279c1 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala @@ -20,11 +20,13 @@ package org.apache.linkis.entrance.execute import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.exception.{EntranceErrorCode, 
EntranceErrorException} -import org.apache.linkis.entrance.job.EntranceExecuteRequest +import org.apache.linkis.entrance.job.{EntranceExecuteRequest, EntranceExecutionJob} import org.apache.linkis.entrance.orchestrator.EntranceOrchestrationFactory import org.apache.linkis.entrance.utils.JobHistoryHelper import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.governance.common.protocol.task.ResponseTaskStatus +import org.apache.linkis.governance.common.utils.LoggerUtils +import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine.CodeLanguageLabel import org.apache.linkis.manager.label.utils.LabelUtil @@ -46,6 +48,7 @@ import org.apache.linkis.orchestrator.execution.{ import org.apache.linkis.orchestrator.execution.impl.DefaultFailedTaskResponse import org.apache.linkis.orchestrator.plans.unit.CodeLogicalUnit import org.apache.linkis.protocol.constants.TaskConstant +import org.apache.linkis.protocol.utils.TaskUtils import org.apache.linkis.scheduler.executer._ import org.apache.linkis.server.BDPJettyServerHelper @@ -54,6 +57,8 @@ import org.apache.commons.lang3.exception.ExceptionUtils import java.util import java.util.Date +import scala.collection.JavaConverters.mapAsScalaMapConverter + class DefaultEntranceExecutor(id: Long) extends EntranceExecutor(id) with SingleTaskOperateSupport @@ -128,35 +133,10 @@ class DefaultEntranceExecutor(id: Long) null != arrayResultSetPathResp.getResultSets && arrayResultSetPathResp.getResultSets.length > 0 ) { val resultsetSize = arrayResultSetPathResp.getResultSets.length - entranceExecuteRequest.getJob.setResultSize(resultsetSize) entranceExecuteRequest.getJob .asInstanceOf[EntranceJob] .addAndGetResultSize(resultsetSize) } - val firstResultSet = arrayResultSetPathResp.getResultSets.headOption.orNull - if (null != firstResultSet) { - // assert that all result set files have same 
parent path, so we get the first - Utils.tryCatch { - entranceExecuteRequest.getJob - .asInstanceOf[EntranceJob] - .getEntranceContext - .getOrCreatePersistenceManager() - .onResultSetCreated( - entranceExecuteRequest.getJob, - AliasOutputExecuteResponse(firstResultSet.alias, firstResultSet.result) - ) - } { case e: Exception => - val msg = s"Persist resultSet error. ${e.getMessage}" - logger.error(msg) - val errorExecuteResponse = new DefaultFailedTaskResponse( - msg, - EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getErrCode, - e - ) - dealResponse(errorExecuteResponse, entranceExecuteRequest, orchestration) - return - } - } case _ => logger.info( s"JobRequest : ${entranceExecuteRequest.jobId()} succeed to execute task,no result." @@ -228,21 +208,77 @@ class DefaultEntranceExecutor(id: Long) orchestration: Orchestration, failedResponse: FailedTaskResponse ) = { - val msg = failedResponse.getErrorCode + ", " + failedResponse.getErrorMsg - getEngineExecuteAsyncReturn.foreach { jobReturn => - jobReturn.notifyError(msg, failedResponse.getCause) - jobReturn.notifyStatus( - ResponseTaskStatus(entranceExecuteRequest.getJob.getId, ExecutionNodeStatus.Failed) + val msg: String = failedResponse.getErrorCode + ", " + failedResponse.getErrorMsg + var canRetry = false + val props: util.Map[String, AnyRef] = entranceExecuteRequest.properties() + val job: EntranceExecutionJob = entranceExecuteRequest.getJob + job.getJobRetryListener.foreach(listener => { + canRetry = listener.onJobFailed( + entranceExecuteRequest.getJob, + entranceExecuteRequest.code(), + props, + failedResponse.getErrorCode, + failedResponse.getErrorMsg ) + }) + // 无法重试,更新失败状态 + if (canRetry) { + // 可以重试,重置任务进度为0 + logger.info(s"task: ${job.getId} reset progress from ${job.getProgress} to 0.0") + job.getProgressListener.foreach(_.onProgressUpdate(job, 0.0f, null)) + + // 如果有模板参数,则需要按模板参数重启动引擎 + val params: util.Map[String, AnyRef] = entranceExecuteRequest.getJob.getJobRequest.getParams + val runtimeMap: 
util.Map[String, AnyRef] = TaskUtils.getRuntimeMap(params) + val startMap: util.Map[String, AnyRef] = TaskUtils.getStartupMap(params) + if (runtimeMap.containsKey(LabelKeyConstant.TEMPLATE_CONF_NAME_KEY)) { + val tempConf: AnyRef = runtimeMap + .getOrDefault(LabelKeyConstant.TEMPLATE_CONF_NAME_KEY, new util.HashMap[String, AnyRef]()) + tempConf match { + case map: util.HashMap[String, AnyRef] => + map.asScala.foreach { case (key, value) => + // 保留原有已经设置的spark3相关参数 + if (!startMap.containsKey(key)) { + startMap.put(key, value) + } + } + case _ => + } + } + + // 处理失败任务 + failedResponse match { + case rte: DefaultFailedTaskResponse => + if (rte.errorIndex >= 0) { + logger.info(s"tasks execute error with error index: ${rte.errorIndex}") + val newParams: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef]() + newParams.put("execute.error.code.index", rte.errorIndex.toString) + LogUtils.generateInfo( + s"tasks execute error with error index: ${rte.errorIndex} and will retry." + ) + TaskUtils.addRuntimeMap(props, newParams) + } + case _ => + } + } else { + logger.debug(s"task execute Failed with : ${msg}") + getEngineExecuteAsyncReturn.foreach { jobReturn => + jobReturn.notifyError(msg, failedResponse.getCause) + jobReturn.notifyStatus( + ResponseTaskStatus(entranceExecuteRequest.getJob.getId, ExecutionNodeStatus.Failed) + ) + } } } override def kill(): Boolean = { + LoggerUtils.setJobIdMDC(getId.toString) logger.info("Entrance start to kill job {} invoke Orchestrator ", this.getId) Utils.tryAndWarn { val msg = s"You job with id was cancelled by user!" getRunningOrchestrationFuture.foreach(_.cancel(msg)) } + LoggerUtils.removeJobIdMDC() true } @@ -277,7 +313,10 @@ class DefaultEntranceExecutor(id: Long) val msg = s"JobRequest (${entranceExecuteRequest.jobId()}) was submitted to Orchestrator." 
logger.info(msg) entranceExecuteRequest.getJob.getLogListener.foreach( - _.onLogUpdate(entranceExecuteRequest.getJob, LogUtils.generateInfo(msg)) + _.onLogUpdate( + entranceExecuteRequest.getJob, + LogUtils.generateInfo(msg + "(您的任务已经提交给Orchestrator进行编排执行)") + ) ) if (entranceExecuteRequest.getJob.getJobRequest.getMetrics == null) { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala index 44cb3620ced..be7fb13871f 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala @@ -19,9 +19,11 @@ package org.apache.linkis.entrance.execute import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} import org.apache.linkis.entrance.job.EntranceExecuteRequest import org.apache.linkis.governance.common.entity.ExecutionNodeStatus._ import org.apache.linkis.governance.common.protocol.task.{RequestTask, ResponseTaskStatus} +import org.apache.linkis.governance.common.utils.LoggerUtils import org.apache.linkis.orchestrator.computation.operation.log.LogProcessor import org.apache.linkis.orchestrator.computation.operation.progress.ProgressProcessor import org.apache.linkis.orchestrator.computation.operation.resource.ResourceReportProcessor @@ -29,6 +31,7 @@ import org.apache.linkis.orchestrator.core.OrchestrationFuture import org.apache.linkis.protocol.UserWithCreator import org.apache.linkis.scheduler.executer._ import org.apache.linkis.scheduler.executer.ExecutorState.ExecutorState +import org.apache.linkis.server.BDPJettyServerHelper import 
org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils @@ -69,9 +72,14 @@ abstract class EntranceExecutor(val id: Long) extends Executor with Logging { } override def execute(executeRequest: ExecuteRequest): ExecuteResponse = { - var request: RequestTask = null - interceptors.foreach(in => request = in.apply(request, executeRequest)) - callExecute(executeRequest) + LoggerUtils.setJobIdMDC(getId.toString) + Utils.tryFinally { + var request: RequestTask = null + interceptors.foreach(in => request = in.apply(request, executeRequest)) + callExecute(executeRequest) + } { + LoggerUtils.removeJobIdMDC() + } } protected def callback(): Unit = {} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala index 0d5d605983b..f4e8f51539c 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala @@ -18,10 +18,11 @@ package org.apache.linkis.entrance.execute import org.apache.linkis.common.exception.WarnException -import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.EntranceErrorException +import org.apache.linkis.entrance.execute.simple.{SimpleEntranceExecutor, SimpleExecuteBusContext} import org.apache.linkis.governance.common.entity.job.JobRequest import org.apache.linkis.scheduler.executer.{Executor, ExecutorManager} import org.apache.linkis.scheduler.queue.{GroupFactory, Job, SchedulerEvent} @@ -37,8 +38,6 @@ abstract 
class EntranceExecutorManager(groupFactory: GroupFactory) private val idGenerator = new AtomicLong(0) - def getOrCreateInterceptors(): Array[ExecuteRequestInterceptor] - override def delete(executor: Executor): Unit = { if (null != executor) { executor.close() @@ -90,22 +89,15 @@ abstract class EntranceExecutorManager(groupFactory: GroupFactory) job.getJobRequest match { case jobReq: JobRequest => val entranceEntranceExecutor = - new DefaultEntranceExecutor(idGenerator.incrementAndGet()) - // getEngineConn Executor - job.getLogListener.foreach( - _.onLogUpdate( - job, - LogUtils.generateInfo("Your job is being scheduled by orchestrator.") - ) - ) + if (EntranceConfiguration.LINKIS_ENTRANCE_SKIP_ORCHESTRATOR) { + new SimpleEntranceExecutor( + jobReq.getId, + SimpleExecuteBusContext.getOrchestratorListenerBusContext() + ) + } else { + new DefaultEntranceExecutor(jobReq.getId) + } jobReq.setUpdatedTime(new Date(System.currentTimeMillis())) - - /** - * // val engineConnExecutor = engineConnManager.getAvailableEngineConnExecutor(mark) - * idToEngines.put(entranceEntranceExecutor.getId, entranceEntranceExecutor) - */ -// instanceToEngines.put(engineConnExecutor.getServiceInstance.getInstance, entranceEntranceExecutor) // todo -// entranceEntranceExecutor.setInterceptors(getOrCreateInterceptors()) // todo entranceEntranceExecutor case _ => throw new EntranceErrorException( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala index b762f546053..7dda86e5a26 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala @@ -160,7 +160,7 @@ abstract class EntranceJob extends Job { getLogListener.foreach( 
_.onLogUpdate( this, - LogUtils.generateInfo("Your job is Scheduled. Please wait it to run.") + LogUtils.generateInfo("Your job is Scheduled. Please wait it to run.(您的任务已经调度运行中)") ) ) case WaitForRetry => @@ -174,7 +174,8 @@ abstract class EntranceJob extends Job { getLogListener.foreach( _.onLogUpdate( this, - LogUtils.generateInfo("Your job is Running now. Please wait it to complete.") + LogUtils + .generateInfo("Your job is Running now. Please wait it to complete.(您的任务已经在运行中)") ) ) getJobRequest.getMetrics.put( @@ -264,6 +265,11 @@ abstract class EntranceJob extends Job { transitionCompleted(executeCompleted) } + def transitionWaitForRetry(reason: String): Unit = { + logger.debug("Job failed with reason: " + reason) + super.transitionWaitForRetry() + } + override protected def isJobShouldRetry(errorExecuteResponse: ErrorExecuteResponse): Boolean = isJobSupportRetry && errorExecuteResponse != null && (if (RPCUtils.isReceiverNotExists(errorExecuteResponse.t)) { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala index 82305572984..7946da40d1a 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala @@ -65,42 +65,3 @@ object JobExecuteRequestInterceptor extends ExecuteRequestInterceptor { } } - -object ReconnectExecuteRequestInterceptor extends ExecuteRequestInterceptor { - val PROPERTY_EXEC_ID = "execId" - - override def apply(requestTask: RequestTask, executeRequest: ExecuteRequest): RequestTask = - executeRequest match { - case reconnect: ReconnectExecuteRequest => - requestTask.data(PROPERTY_EXEC_ID, reconnect.execId) - requestTask 
- case _ => requestTask - } - -} - -object StorePathExecuteRequestInterceptor extends ExecuteRequestInterceptor { - - override def apply(requestTask: RequestTask, executeRequest: ExecuteRequest): RequestTask = - executeRequest match { - case storePath: StorePathExecuteRequest => - requestTask.data(RequestTask.RESULT_SET_STORE_PATH, storePath.storePath) - requestTask - case _ => requestTask - } - -} - -object RuntimePropertiesExecuteRequestInterceptor extends ExecuteRequestInterceptor { - - override def apply(requestTask: RequestTask, executeRequest: ExecuteRequest): RequestTask = - executeRequest match { - case runtime: RuntimePropertiesExecuteRequest => - runtime.properties.asScala.foreach { case (k, v) => - requestTask.data(k, v) - } - requestTask - case _ => requestTask - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/impl/EntranceExecutorManagerImpl.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/impl/EntranceExecutorManagerImpl.scala index 3efcf41c891..57de9cc0cbf 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/impl/EntranceExecutorManagerImpl.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/impl/EntranceExecutorManagerImpl.scala @@ -22,16 +22,8 @@ import org.apache.linkis.orchestrator.ecm.EngineConnManager import org.apache.linkis.scheduler.listener.ExecutorListener import org.apache.linkis.scheduler.queue.GroupFactory -class EntranceExecutorManagerImpl(groupFactory: GroupFactory, engineConnManager: EngineConnManager) +class EntranceExecutorManagerImpl(groupFactory: GroupFactory) extends EntranceExecutorManager(groupFactory) { - override def getOrCreateInterceptors(): Array[ExecuteRequestInterceptor] = Array( - JobExecuteRequestInterceptor, - LabelExecuteRequestInterceptor, - ReconnectExecuteRequestInterceptor, - 
StorePathExecuteRequestInterceptor, - RuntimePropertiesExecuteRequestInterceptor - ) - override def setExecutorListener(engineListener: ExecutorListener): Unit = {} } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleASyncListener.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleASyncListener.scala new file mode 100644 index 00000000000..bc52fbd800c --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleASyncListener.scala @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.execute.simple + +import org.apache.linkis.common.listener.Event +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.entrance.EntranceServer +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.orchestrator.listener.OrchestratorAsyncEvent +import org.apache.linkis.orchestrator.listener.task.{ + TaskLogEvent, + TaskLogListener, + TaskProgressListener, + TaskRunningInfoEvent +} + +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Component + +import javax.annotation.PostConstruct + +@Component +class SimpleASyncListener extends TaskLogListener with TaskProgressListener with Logging { + + @Autowired private var entranceServer: EntranceServer = _ + + @PostConstruct + def init(): Unit = { + if (EntranceConfiguration.LINKIS_ENTRANCE_SKIP_ORCHESTRATOR) { + SimpleExecuteBusContext + .getOrchestratorListenerBusContext() + .getOrchestratorAsyncListenerBus + .addListener(this) + } + } + + override def onLogUpdate(taskLogEvent: TaskLogEvent): Unit = {} + + override def onProgressOn(taskProgressEvent: TaskRunningInfoEvent): Unit = {} + + override def onEvent(event: OrchestratorAsyncEvent): Unit = {} + + override def onEventError(event: Event, t: Throwable): Unit = {} +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleEntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleEntranceExecutor.scala new file mode 100644 index 00000000000..d9e18081d23 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleEntranceExecutor.scala @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.execute.simple + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} +import org.apache.linkis.entrance.execute.{EngineExecuteAsyncReturn, EntranceExecutor} +import org.apache.linkis.entrance.job.EntranceExecuteRequest +import org.apache.linkis.governance.common.utils.LoggerUtils +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.CodeLanguageLabel +import org.apache.linkis.manager.label.utils.LabelUtil +import org.apache.linkis.orchestrator.code.plans.ast.CodeJob +import org.apache.linkis.orchestrator.code.plans.logical.CodeLogicalUnitTaskDesc +import org.apache.linkis.orchestrator.computation.entity.ComputationJobReq +import org.apache.linkis.orchestrator.computation.physical.CodeLogicalUnitExecTask +import org.apache.linkis.orchestrator.converter.ASTContextImpl +import org.apache.linkis.orchestrator.execution.{ + AsyncTaskResponse, + FailedTaskResponse, + SucceedTaskResponse +} +import org.apache.linkis.orchestrator.listener.OrchestratorListenerBusContext +import org.apache.linkis.orchestrator.plans.physical.{ExecTask, PhysicalContextImpl} +import 
org.apache.linkis.orchestrator.plans.unit.CodeLogicalUnit +import org.apache.linkis.scheduler.executer._ + +import java.util + +class SimpleEntranceExecutor( + id: Long, + orchestratorListenerBusContext: OrchestratorListenerBusContext +) extends EntranceExecutor(id) + with SingleTaskOperateSupport + with Logging { + + private var codeUnitExecTask: CodeLogicalUnitExecTask = null + + override protected def callExecute(request: ExecuteRequest): ExecuteResponse = { + val entranceExecuteRequest: EntranceExecuteRequest = request match { + case request: EntranceExecuteRequest => + request + case _ => + throw new EntranceErrorException( + EntranceErrorCode.EXECUTE_REQUEST_INVALID.getErrCode, + s"Invalid entranceExecuteRequest : ${request.code}" + ) + } + // 1. create JobReq + val computationJobReq = requestToComputationJobReq(entranceExecuteRequest) + // 2. create code job + val codeJob = new CodeJob(null, null) + val astContext = ASTContextImpl.newBuilder().setJobReq(computationJobReq).build() + codeJob.setAstContext(astContext) + codeJob.setCodeLogicalUnit(computationJobReq.getCodeLogicalUnit) + codeJob.setParams(computationJobReq.getParams) + codeJob.setName(computationJobReq.getName + "_Job") + codeJob.setSubmitUser(computationJobReq.getSubmitUser) + codeJob.setExecuteUser(computationJobReq.getExecuteUser) + codeJob.setLabels(computationJobReq.getLabels) + codeJob.setPriority(computationJobReq.getPriority) + codeUnitExecTask = new CodeLogicalUnitExecTask(Array[ExecTask](), Array[ExecTask]()) + // set job id, can find by getEntranceContext.getOrCreateScheduler().get(execId).map(_.asInstanceOf[Job]) + codeUnitExecTask.setId(entranceExecuteRequest.getJob.getId) + // 3.set code unit + codeUnitExecTask.setCodeLogicalUnit(computationJobReq.getCodeLogicalUnit) + codeUnitExecTask.setTaskDesc(CodeLogicalUnitTaskDesc(codeJob)) + // 4. 
set context + val context = new PhysicalContextImpl(codeUnitExecTask, Array.empty) + context.setSyncBus(orchestratorListenerBusContext.getOrchestratorSyncListenerBus) + context.setAsyncBus(orchestratorListenerBusContext.getOrchestratorAsyncListenerBus) + // 5. execute + val response = codeUnitExecTask.execute() + response match { + case async: AsyncTaskResponse => + new EngineExecuteAsyncReturn(request, null) + case succeed: SucceedTaskResponse => + logger.info(s"Succeed to execute ExecTask(${getId})") + SuccessExecuteResponse() + case failedTaskResponse: FailedTaskResponse => + logger.info(s"Failed to execute ExecTask(${getId})") + ErrorExecuteResponse(failedTaskResponse.getErrorMsg, failedTaskResponse.getCause) + case _ => + logger.warn(s"ExecTask(${getId}) need to retry") + ErrorExecuteResponse("unknown response: " + response, null) + } + } + + def requestToComputationJobReq( + entranceExecuteRequest: EntranceExecuteRequest + ): ComputationJobReq = { + val jobReqBuilder = ComputationJobReq.newBuilder() + jobReqBuilder.setId(entranceExecuteRequest.jobId()) + jobReqBuilder.setSubmitUser(entranceExecuteRequest.submitUser()) + jobReqBuilder.setExecuteUser(entranceExecuteRequest.executeUser()) + val codeTypeLabel: Label[_] = LabelUtil.getCodeTypeLabel(entranceExecuteRequest.getLabels) + if (null == codeTypeLabel) { + throw new EntranceErrorException( + EntranceErrorCode.EXECUTE_REQUEST_INVALID.getErrCode, + s"code Type Label is needed" + ) + } + val codes = new util.ArrayList[String]() + codes.add(entranceExecuteRequest.code()) + val codeLogicalUnit = + new CodeLogicalUnit(codes, codeTypeLabel.asInstanceOf[CodeLanguageLabel]) + jobReqBuilder.setCodeLogicalUnit(codeLogicalUnit) + jobReqBuilder.setLabels(entranceExecuteRequest.getLabels) + jobReqBuilder.setExecuteUser(entranceExecuteRequest.executeUser()) + jobReqBuilder.setParams(entranceExecuteRequest.properties()) + jobReqBuilder.build().asInstanceOf[ComputationJobReq] + } + + override def kill(): Boolean = { + 
LoggerUtils.setJobIdMDC(getId.toString) + logger.info("Entrance start to kill job {} invoke Orchestrator ", this.getId) + Utils.tryAndWarn { + if (null != codeUnitExecTask) { + codeUnitExecTask.kill() + } + } + LoggerUtils.removeJobIdMDC() + true + } + + override def pause(): Boolean = { + true + } + + override def resume(): Boolean = { + true + } + + override def close(): Unit = { + getEngineExecuteAsyncReturn.foreach { e => + e.notifyError(s"$toString has already been completed with state $state.") + } + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleExecuteBusContext.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleExecuteBusContext.scala new file mode 100644 index 00000000000..6f2798a52c8 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleExecuteBusContext.scala @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.execute.simple +import org.apache.linkis.orchestrator.listener.OrchestratorListenerBusContext + +object SimpleExecuteBusContext { + + private lazy val orchestratorListenerBusContext = OrchestratorListenerBusContext.createBusContext + + def getOrchestratorListenerBusContext(): OrchestratorListenerBusContext = + orchestratorListenerBusContext + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleSyncListener.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleSyncListener.scala new file mode 100644 index 00000000000..46107ff7017 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/simple/SimpleSyncListener.scala @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.execute.simple + +import org.apache.linkis.common.listener.Event +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.entrance.EntranceServer +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.orchestrator.listener.OrchestratorSyncEvent +import org.apache.linkis.orchestrator.listener.task.{ + TaskErrorResponseEvent, + TaskResultSetEvent, + TaskResultSetListener, + TaskResultSetSizeEvent, + TaskStatusEvent, + TaskStatusListener +} + +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Component + +import javax.annotation.PostConstruct + +/** + * 1.TaskLogListener: job.getLogListener.foreach(_.onLogUpdate(job, logEvent.log)) + * + * 2.TaskProgressListener: entranceJob.getProgressListener.foreach( _.onProgressUpdate(entranceJob, + * progressInfoEvent.progress, entranceJob.getProgressInfo) + * + * 3.TaskResultSetListener entranceContext.getOrCreatePersistenceManager().onResultSizeCreated(j, + * taskResultSize.resultSize) .getOrCreatePersistenceManager() .onResultSetCreated( + * entranceExecuteRequest.getJob, AliasOutputExecuteResponse(firstResultSet.alias, + * firstResultSet.result) ) + * + * 4. 
TaskStatusListener getEngineExecuteAsyncReturn.foreach { jobReturn => jobReturn.notifyStatus( + * ResponseTaskStatus(entranceExecuteRequest.getJob.getId, ExecutionNodeStatus.Succeed) ) } val msg + * = failedResponse.getErrorCode + ", " + failedResponse.getErrorMsg + * getEngineExecuteAsyncReturn.foreach { jobReturn => jobReturn.notifyError(msg, + * failedResponse.getCause) jobReturn.notifyStatus( + * ResponseTaskStatus(entranceExecuteRequest.getJob.getId, ExecutionNodeStatus.Failed) ) } + */ +@Component +class SimpleSyncListener extends TaskStatusListener with TaskResultSetListener with Logging { + + @Autowired private var entranceServer: EntranceServer = _ + + @PostConstruct + def init(): Unit = { + if (EntranceConfiguration.LINKIS_ENTRANCE_SKIP_ORCHESTRATOR) { + SimpleExecuteBusContext + .getOrchestratorListenerBusContext() + .getOrchestratorSyncListenerBus + .addListener(this) + } + } + + override def onStatusUpdate(taskStatusEvent: TaskStatusEvent): Unit = {} + + override def onTaskErrorResponseEvent(taskErrorResponseEvent: TaskErrorResponseEvent): Unit = {} + + override def onResultSetCreate(taskResultSetEvent: TaskResultSetEvent): Unit = {} + + override def onResultSizeCreated(taskResultSetSizeEvent: TaskResultSetSizeEvent): Unit = {} + + override def onSyncEvent(event: OrchestratorSyncEvent): Unit = {} + + override def onEventError(event: Event, t: Throwable): Unit = {} +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/OnceJobInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/OnceJobInterceptor.scala index 1291a8566cb..9b05789800c 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/OnceJobInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/OnceJobInterceptor.scala @@ -94,8 +94,8 @@ class 
OnceJobInterceptor extends EntranceInterceptor { s"/tmp/${task.getExecuteUser}/${task.getId}" protected def getJobContent(task: JobRequest): util.Map[String, AnyRef] = { - // TODO Wait for optimizing since the class `JobRequest` is waiting for optimizing . val jobContent = new util.HashMap[String, AnyRef] + jobContent.putAll(TaskUtils.getStartupMap(task.getParams)) jobContent.put(TaskConstant.CODE, task.getExecutionCode) task.getLabels.foreach { case label: CodeLanguageLabel => diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/AISQLTransformInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/AISQLTransformInterceptor.scala new file mode 100644 index 00000000000..b457287e3fd --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/AISQLTransformInterceptor.scala @@ -0,0 +1,182 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.interceptor.impl + +import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.log.LogUtils +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.common.utils.CodeAndRunTypeUtils.LANGUAGE_TYPE_AI_SQL +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.conf.EntranceConfiguration._ +import org.apache.linkis.entrance.interceptor.EntranceInterceptor +import org.apache.linkis.entrance.utils.EntranceUtils +import org.apache.linkis.governance.common.entity.job.{JobAiRequest, JobRequest} +import org.apache.linkis.governance.common.protocol.job.JobAiReqInsert +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.conf.LabelCommonConfig +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.{EngineType, EngineTypeLabel, UserCreatorLabel} +import org.apache.linkis.manager.label.utils.LabelUtil +import org.apache.linkis.protocol.utils.TaskUtils +import org.apache.linkis.rpc.Sender + +import org.apache.commons.lang3.StringUtils + +import org.springframework.beans.BeanUtils + +import java.{lang, util} +import java.util.Date + +import scala.collection.JavaConverters._ + +class AISQLTransformInterceptor extends EntranceInterceptor with Logging { + + override def apply(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + val aiSqlEnable: Boolean = "true".equals(AI_SQL_KEY.getValue) + val supportAISQLCreator: String = AI_SQL_CREATORS.toLowerCase() + val sqlLanguage: String = LANGUAGE_TYPE_AI_SQL + val sparkEngineType: String = AI_SQL_DEFAULT_SPARK_ENGINE_TYPE + val hiveEngineType: String = AI_SQL_DEFAULT_HIVE_ENGINE_TYPE + val labels: util.List[Label[_]] = jobRequest.getLabels + val codeType: String = LabelUtil.getCodeType(labels) + // engineType and creator have been verified in LabelCheckInterceptor. 
+ val userCreatorOpt: Option[Label[_]] = labels.asScala.find(_.isInstanceOf[UserCreatorLabel]) + val creator: String = userCreatorOpt.get.asInstanceOf[UserCreatorLabel].getCreator + val engineTypeLabelOpt: Option[Label[_]] = labels.asScala.find(_.isInstanceOf[EngineTypeLabel]) + + val startMap: util.Map[String, AnyRef] = TaskUtils.getStartupMap(jobRequest.getParams) + + val engineTypeLabel: EngineTypeLabel = engineTypeLabelOpt.get.asInstanceOf[EngineTypeLabel] + + /** + * aiSql change to spark or hive + * 1. Use the spark engine when configuring spark parameter templates 2. Use the hive engine + * when configuring hive parameter templates 3. Request doctor to get engine type 4. Use + * spark by default or exception + */ + var currentEngineType: String = engineTypeLabel.getStringValue + if ( + aiSqlEnable && sqlLanguage + .equals(codeType) && supportAISQLCreator.contains(creator.toLowerCase()) + ) { + + logger.info(s"aisql enable for ${jobRequest.getId}") + startMap.put(AI_SQL_KEY.key, AI_SQL_KEY.getValue.asInstanceOf[AnyRef]) + startMap.put(RETRY_NUM_KEY.key, RETRY_NUM_KEY.getValue.asInstanceOf[AnyRef]) + logAppender.append(LogUtils.generateInfo(s"current code is aiSql task.\n")) + + // 用户配置了模板参数 + if (startMap.containsKey("ec.resource.name")) { + val hiveParamKeys: String = AI_SQL_HIVE_TEMPLATE_KEYS + if (containsKeySubstring(startMap, hiveParamKeys)) { + changeEngineLabel(hiveEngineType, labels) + logAppender.append( + LogUtils.generateInfo( + s"use $hiveEngineType by set ${startMap.get("ec.resource.name")} template.\n" + ) + ) + currentEngineType = hiveEngineType + } else { + changeEngineLabel(sparkEngineType, labels) + logAppender.append( + LogUtils.generateInfo( + s"use $sparkEngineType by set ${startMap.get("ec.resource.name")} template.\n" + ) + ) + currentEngineType = sparkEngineType + } + logger.info( + s"use ${startMap.get("ec.resource.name")} conf, use $currentEngineType execute task." 
+ ) + } else { + logger.info(s"start intelligent selection execution engine for ${jobRequest.getId}") + val engineType: String = + EntranceUtils.getDynamicEngineType(jobRequest.getExecutionCode, logAppender) + if ("hive".equals(engineType)) { + changeEngineLabel(hiveEngineType, labels) + logAppender.append( + LogUtils.generateInfo(s"use $hiveEngineType by intelligent selection.\n") + ) + currentEngineType = hiveEngineType + } else { + changeEngineLabel(sparkEngineType, labels) + logAppender.append( + LogUtils.generateInfo(s"use $sparkEngineType by intelligent selection.\n") + ) + currentEngineType = sparkEngineType + } + logger.info( + s"end intelligent selection execution engine, and engineType is ${currentEngineType} for ${jobRequest.getId}." + ) + EntranceUtils.dealsparkDynamicConf(jobRequest, logAppender, jobRequest.getParams) + } + + persist(jobRequest); + } + + TaskUtils.addStartupMap(jobRequest.getParams, startMap) + jobRequest + } + + private def persist(jobRequest: JobRequest) = { + val sender: Sender = + Sender.getSender(Configuration.JOBHISTORY_SPRING_APPLICATION_NAME.getValue) + val jobAiRequest: JobAiRequest = new JobAiRequest + BeanUtils.copyProperties(jobRequest, jobAiRequest) + jobAiRequest.setId(null) + jobAiRequest.setJobHistoryId(jobRequest.getId + "") + jobAiRequest.setChangeTime(new Date()) + jobAiRequest.setEngineType(LabelUtil.getEngineType(jobRequest.getLabels)) + jobAiRequest.setSubmitCode(jobRequest.getExecutionCode) + val jobAiReqInsert: JobAiReqInsert = JobAiReqInsert(jobAiRequest) + logger.info(s"${jobRequest.getId} insert into ai_history: ${jobAiRequest}") + sender.ask(jobAiReqInsert) + logger.info(s"${jobRequest.getId} insert into ai_history end.") + } + + private def containsKeySubstring(map: util.Map[String, AnyRef], keywords: String): Boolean = { + if (StringUtils.isBlank(keywords) || map == null || map.isEmpty) { + false + } else { + // 将关键词字符串按逗号分隔成数组 + val keywordArray: Array[String] = keywords.split(",").map(_.trim) + + // 遍历 
Map 的键,检查是否包含任何一个关键词 + map.keySet().asScala.exists { key => + keywordArray.exists(key.contains) + } + } + } + + private def changeEngineLabel(sparkEngineType: String, labels: util.List[Label[_]]): Unit = { + val it: util.Iterator[Label[_]] = labels.iterator() + // 移除引擎标签 + while (it.hasNext) { + if (it.next().isInstanceOf[EngineTypeLabel]) { + it.remove() + } + } + // 添加正确的引擎标签 + val newEngineTypeLabel: EngineTypeLabel = + LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel(classOf[EngineTypeLabel]) + newEngineTypeLabel.setEngineType(sparkEngineType.split("-")(0)) + newEngineTypeLabel.setVersion(sparkEngineType.split("-")(1)) + labels.add(newEngineTypeLabel) + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala index bbb904c6a71..34bd6ead01a 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala @@ -103,6 +103,7 @@ object ScalaCommentHelper extends CommentHelper { private val scalaCommentPattern: String = "(?ms)([\"'](?:|[^'])*['\"])|//.*?$|/\\*.*?\\*/" override def dealComment(code: String): String = code + } object CommentMain { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala index 7a7cb7463a9..d9386477e05 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala +++ 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala @@ -18,6 +18,7 @@ package org.apache.linkis.entrance.interceptor.impl import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils, VariableUtils} import org.apache.linkis.governance.common.entity.job.JobRequest import org.apache.linkis.manager.label.utils.LabelUtil @@ -42,7 +43,11 @@ object CustomVariableUtils extends Logging { * : requestPersistTask * @return */ - def replaceCustomVar(jobRequest: JobRequest, runType: String): String = { + def replaceCustomVar( + jobRequest: JobRequest, + runType: String, + logAppender: java.lang.StringBuilder + ): String = { val variables: util.Map[String, String] = new util.HashMap[String, String]() val sender = Sender.getSender(Configuration.CLOUD_CONSOLE_VARIABLE_SPRING_APPLICATION_NAME.getValue) @@ -65,9 +70,26 @@ object CustomVariableUtils extends Logging { .getVariableMap(jobRequest.getParams) .asInstanceOf[util.HashMap[String, String]] variables.putAll(variableMap) - if (!variables.containsKey("user")) { - variables.put("user", jobRequest.getExecuteUser) + variables.put("user", jobRequest.getExecuteUser) + // User customization is not supported. 
If the user has customized it, add a warning log and replace it + if (variables.containsKey("submit_user")) { + logAppender.append( + LogUtils.generateInfo( + "submitUser variable will be replaced by system value:" + jobRequest.getSubmitUser + " -> " + variables + .get("submit_user") + "\n" + ) + ) } + if (variables.containsKey("execute_user")) { + logAppender.append( + LogUtils.generateInfo( + "executeUser variable will be replaced by system value:" + jobRequest.getExecuteUser + " -> " + variables + .get("execute_user") + "\n" + ) + ) + } + variables.put("execute_user", jobRequest.getExecuteUser) + variables.put("submit_user", jobRequest.getSubmitUser) VariableUtils.replace(jobRequest.getExecutionCode, runType, variables) } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala index 1914730d519..41c594bd24d 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala @@ -22,15 +22,18 @@ import org.apache.linkis.common.exception.ErrorException import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.conf.EntranceConfiguration.PYTHON_SAFE_CHECK_SWITCH import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceIllegalParamException} import org.apache.linkis.entrance.interceptor.exception.{ PythonCodeCheckException, ScalaCodeCheckException } +import org.apache.linkis.entrance.utils.SafeUtils import org.apache.linkis.governance.common.entity.job.JobRequest import org.apache.commons.lang3.StringUtils +import java.util.Locale import 
java.util.regex.Pattern import scala.collection.mutable.ArrayBuffer @@ -133,6 +136,8 @@ object SQLExplain extends Explain { .generateWarn("please pay attention ,SQL full export mode opened(请注意,SQL全量导出模式打开)\n") ) } + var isFirstTimePrintingLimit = true + var isFirstTimePrintingOverLimit = true if (tempCode.contains("""\;""")) { val semicolonIndexes = findRealSemicolonIndex(tempCode) var oldIndex = 0 @@ -142,21 +147,27 @@ object SQLExplain extends Explain { if (isSelectCmd(singleCode)) { val trimCode = singleCode.trim if (isSelectCmdNoLimit(trimCode) && !isNoLimitAllowed) { - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql without limit, DSS will add limit 5000 to your sql" - ) + "\n" - ) + if (isFirstTimePrintingLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql without limit, DSS will add limit 5000 to your sql" + ) + "\n" + ) + isFirstTimePrintingLimit = false + } // 将注释先干掉,然后再进行添加limit val realCode = cleanComment(trimCode) fixedCode += (realCode + SQL_APPEND_LIMIT) } else if (isSelectOverLimit(singleCode) && !isNoLimitAllowed) { val trimCode = singleCode.trim - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql with limit exceeding 5000, it is not allowed. DSS will change your limit to 5000" - ) + "\n" - ) + if (isFirstTimePrintingOverLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql with limit exceeding 5000, it is not allowed. 
DSS will change your limit to 5000" + ) + "\n" + ) + isFirstTimePrintingOverLimit = false + } fixedCode += repairSelectOverLimit(trimCode) } else { fixedCode += singleCode.trim @@ -170,21 +181,27 @@ object SQLExplain extends Explain { if (isSelectCmd(singleCode)) { val trimCode = singleCode.trim if (isSelectCmdNoLimit(trimCode) && !isNoLimitAllowed) { - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql without limit, DSS will add limit 5000 to your sql" - ) + "\n" - ) + if (isFirstTimePrintingLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql without limit, DSS will add limit 5000 to your sql" + ) + "\n" + ) + isFirstTimePrintingLimit = false + } // 将注释先干掉,然后再进行添加limit val realCode = cleanComment(trimCode) fixedCode += (realCode + SQL_APPEND_LIMIT) } else if (isSelectOverLimit(singleCode) && !isNoLimitAllowed) { val trimCode = singleCode.trim - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql with limit exceeding 5000, it is not allowed. DSS will change your limit to 5000" - ) + "\n" - ) + if (isFirstTimePrintingOverLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql with limit exceeding 5000, it is not allowed. 
DSS will change your limit to 5000" + ) + "\n" + ) + isFirstTimePrintingOverLimit = false + } fixedCode += repairSelectOverLimit(trimCode) } else { fixedCode += singleCode.trim @@ -225,10 +242,10 @@ object SQLExplain extends Explain { // 如果一段sql是 --xxx回车select * from default.users,那么他也是select语句 val realCode = cleanComment(code) // 以前,在判断,对于select* from xxx这样的SQL时会出现问题的,但是这种语法hive是支持的 - realCode.trim.split("\\s+")(0).toLowerCase.contains("select") + realCode.trim.split("\\s+")(0).toLowerCase(Locale.getDefault).contains("select") } - def continueWhenError = false + // def continueWhenError = false def isSelectCmdNoLimit(cmd: String): Boolean = { if (StringUtils.isEmpty(cmd)) { @@ -244,8 +261,10 @@ object SQLExplain extends Explain { val a = words.toArray val length = a.length if (a.length > 1) { - val second_last = a(length - 2) - !"limit".equals(second_last.toLowerCase()) + val second_last = a(length - 2).toLowerCase(Locale.getDefault) + // for some case eg:"SELECT * from dual WHERE (1=1)LIMIT 1;" + val result = !("limit".equals(second_last) || second_last.contains(")limit")) + result } else { false } @@ -254,8 +273,9 @@ object SQLExplain extends Explain { private def cleanComment(sql: String): String = { val cleanSql = new StringBuilder sql.trim.split(LINE_BREAK) foreach { singleSql => - if (!singleSql.trim().startsWith(COMMENT_FLAG)) + if (!singleSql.trim().startsWith(COMMENT_FLAG)) { cleanSql.append(singleSql).append(LINE_BREAK) + } } cleanSql.toString().trim } @@ -266,8 +286,8 @@ object SQLExplain extends Explain { } var overLimit: Boolean = false var code = cmd.trim - if (code.toLowerCase.contains("limit")) { - code = code.substring(code.toLowerCase().lastIndexOf("limit")).trim + if (code.toLowerCase(Locale.getDefault).contains(LIMIT)) { + code = code.substring(code.toLowerCase((Locale.getDefault)).lastIndexOf(LIMIT)).trim } val hasLimit = code.toLowerCase().matches("limit\\s+\\d+\\s*;?") if (hasLimit) { @@ -292,13 +312,14 @@ object SQLExplain extends Explain { 
* String */ def repairSelectOverLimit(cmd: String): String = { - var code = cmd.trim + val code = cmd.trim var preCode = "" var tailCode = "" - var limitNum = SQL_DEFAULT_LIMIT.getValue - if (code.toLowerCase.contains("limit")) { - preCode = code.substring(0, code.toLowerCase().lastIndexOf("limit")).trim - tailCode = code.substring(code.toLowerCase().lastIndexOf("limit")).trim + val limitNum = SQL_DEFAULT_LIMIT.getValue + val lowerCaseCode = code.toLowerCase(Locale.getDefault) + if (lowerCaseCode.contains(LIMIT)) { + preCode = code.substring(0, lowerCaseCode.lastIndexOf(LIMIT)).trim + tailCode = code.substring(lowerCaseCode.lastIndexOf(LIMIT)).trim } if (isUpperSelect(cmd)) preCode + " LIMIT " + limitNum else preCode + " limit " + limitNum } @@ -347,54 +368,60 @@ object PythonExplain extends Explain { if (EntranceConfiguration.SKIP_AUTH.getHotValue()) { return true } - - CAN_PASS_CODES - .split(";") - .foreach(c => { - if (code.contains(c)) { - if ( - IMPORT_SYS_MOUDLE - .findAllIn(code) - .nonEmpty || FROM_SYS_IMPORT.findAllIn(code).nonEmpty - ) - throw PythonCodeCheckException(20070, "can not use sys module") - else if ( - IMPORT_OS_MOUDLE.findAllIn(code).nonEmpty || FROM_OS_IMPORT.findAllIn(code).nonEmpty - ) - throw PythonCodeCheckException(20071, "can not use os module") - else if ( - IMPORT_PROCESS_MODULE - .findAllIn(code) - .nonEmpty || FROM_MULTIPROCESS_IMPORT.findAllIn(code).nonEmpty - ) - throw PythonCodeCheckException(20072, "can not use process module") - else if (SC_STOP.findAllIn(code).nonEmpty) - throw PythonCodeCheckException(20073, "You can not stop SparkContext, It's dangerous") - else if (FROM_NUMPY_IMPORT.findAllIn(code).nonEmpty) - throw PythonCodeCheckException(20074, "Numpy packages cannot be imported in this way") - } - }) - - code.split(System.lineSeparator()) foreach { code => - if (IMPORT_SYS_MOUDLE.findAllIn(code).nonEmpty || FROM_SYS_IMPORT.findAllIn(code).nonEmpty) - throw PythonCodeCheckException(20070, "can not use sys module") - 
else if (IMPORT_OS_MOUDLE.findAllIn(code).nonEmpty || FROM_OS_IMPORT.findAllIn(code).nonEmpty) - throw PythonCodeCheckException(20071, "can not use os moudle") - else if ( - IMPORT_PROCESS_MODULE.findAllIn(code).nonEmpty || FROM_MULTIPROCESS_IMPORT - .findAllIn(code) - .nonEmpty - ) - throw PythonCodeCheckException(20072, "can not use process module") - else if ( - IMPORT_SUBPORCESS_MODULE.findAllIn(code).nonEmpty || FROM_SUBPROCESS_IMPORT - .findAllIn(code) - .nonEmpty - ) - throw PythonCodeCheckException(20072, "can not use subprocess module") - else if (SC_STOP.findAllIn(code).nonEmpty) - throw PythonCodeCheckException(20073, "You can not stop SparkContext, It's dangerous") + if (PYTHON_SAFE_CHECK_SWITCH && (!SafeUtils.isCodeSafe(code))) { + throw PythonCodeCheckException(20074, "Invalid python code.(当前代码存在非法获取系统信息或执行非法命令等危险操作,禁止执行)") } +// CAN_PASS_CODES +// .split(";") +// .foreach(c => { +// if (code.contains(c)) { +// if ( +// IMPORT_SYS_MOUDLE +// .findAllIn(code) +// .nonEmpty || FROM_SYS_IMPORT.findAllIn(code).nonEmpty +// ) { +// throw PythonCodeCheckException(20070, "can not use sys module") +// } else if ( +// IMPORT_OS_MOUDLE.findAllIn(code).nonEmpty || FROM_OS_IMPORT.findAllIn(code).nonEmpty +// ) { +// throw PythonCodeCheckException(20071, "can not use os module") +// } else if ( +// IMPORT_PROCESS_MODULE +// .findAllIn(code) +// .nonEmpty || FROM_MULTIPROCESS_IMPORT.findAllIn(code).nonEmpty +// ) { +// throw PythonCodeCheckException(20072, "can not use process module") +// } else if (SC_STOP.findAllIn(code).nonEmpty) { +// throw PythonCodeCheckException(20073, "You can not stop SparkContext, It's dangerous") +// } else if (FROM_NUMPY_IMPORT.findAllIn(code).nonEmpty) { +// throw PythonCodeCheckException(20074, "Numpy packages cannot be imported in this way") +// } +// } +// }) +// +// code.split(System.lineSeparator()) foreach { code => +// if (IMPORT_SYS_MOUDLE.findAllIn(code).nonEmpty || FROM_SYS_IMPORT.findAllIn(code).nonEmpty) { +// throw 
PythonCodeCheckException(20070, "can not use sys module") +// } else if ( +// IMPORT_OS_MOUDLE.findAllIn(code).nonEmpty || FROM_OS_IMPORT.findAllIn(code).nonEmpty +// ) { +// throw PythonCodeCheckException(20071, "can not use os moudle") +// } else if ( +// IMPORT_PROCESS_MODULE.findAllIn(code).nonEmpty || FROM_MULTIPROCESS_IMPORT +// .findAllIn(code) +// .nonEmpty +// ) { +// throw PythonCodeCheckException(20072, "can not use process module") +// } else if ( +// IMPORT_SUBPORCESS_MODULE.findAllIn(code).nonEmpty || FROM_SUBPROCESS_IMPORT +// .findAllIn(code) +// .nonEmpty +// ) { +// throw PythonCodeCheckException(20072, "can not use subprocess module") +// } else if (SC_STOP.findAllIn(code).nonEmpty) { +// throw PythonCodeCheckException(20073, "You can not stop SparkContext, It's dangerous") +// } +// } true } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LogPathCreateInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LogPathCreateInterceptor.scala index 1b093bedd9d..5ed8d88fe0d 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LogPathCreateInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LogPathCreateInterceptor.scala @@ -24,6 +24,8 @@ import org.apache.linkis.entrance.interceptor.exception.LogPathCreateException import org.apache.linkis.entrance.parser.ParserUtils import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.commons.lang3.exception.ExceptionUtils + /** * Description:Log path generation interceptor, used to set the path log of the task(日志路径生成拦截器, * 用于设置task的路径日志) @@ -41,14 +43,14 @@ class LogPathCreateInterceptor extends EntranceInterceptor with Logging { case e: ErrorException => val exception: LogPathCreateException = 
LogPathCreateException( 20075, - "Failed to get logPath(获取logPath失败),reason: " + e.getMessage + "Failed to get logPath(获取logPath失败),reason msg: " + e.getMessage ) exception.initCause(e) exception case t: Throwable => val exception: LogPathCreateException = LogPathCreateException( 20075, - "Failed to get logPath(获取logPath失败), reason: " + t.getCause + "Failed to get logPath(获取logPath失败), reason: " + ExceptionUtils.getStackTrace(t) ) exception.initCause(t) exception diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SensitiveCheckInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SensitiveCheckInterceptor.scala new file mode 100644 index 00000000000..313b0073ce9 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SensitiveCheckInterceptor.scala @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.interceptor.impl + +import org.apache.linkis.common.log.LogUtils +import org.apache.linkis.common.utils.CodeAndRunTypeUtils +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.interceptor.EntranceInterceptor +import org.apache.linkis.entrance.interceptor.exception.CodeCheckException +import org.apache.linkis.entrance.utils.EntranceUtils +import org.apache.linkis.entrance.utils.EntranceUtils.logInfo +import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.manager.label.utils.LabelUtil + +import org.apache.commons.lang3.StringUtils + +import java.lang + +class SensitiveCheckInterceptor extends EntranceInterceptor { + + override def apply(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + if (!EntranceConfiguration.DOCTOR_SENSITIVE_SQL_CHECK_SWITCH) { + return jobRequest + } + + val isWhiteList = EntranceConfiguration.DOCTOR_SENSITIVE_SQL_CHECK_WHITELIST.contains( + jobRequest.getExecuteUser + ) || + EntranceConfiguration.DOCTOR_SENSITIVE_SQL_CHECK_WHITELIST.contains(jobRequest.getSubmitUser) + if (isWhiteList) { + logAppender.append( + LogUtils + .generateInfo(s"Sensitive SQL Check: whiteList contains user ! 
Skip Check\n") + ) + return jobRequest + } + val labellist = jobRequest.getLabels + + val engineType = LabelUtil.getEngineTypeLabel(labellist).getEngineType + if (!EntranceConfiguration.DOCTOR_SENSITIVE_SQL_CHECK_ENGINETYPE.contains(engineType)) { + return jobRequest + } + + val codeType = Option(LabelUtil.getCodeType(labellist)) + .map(_.toLowerCase()) + .getOrElse("") + + val languageType = CodeAndRunTypeUtils.getLanguageTypeByCodeType(codeType) + if (!EntranceConfiguration.DOCTOR_SENSITIVE_SQL_CHECK_RUNTYPE.contains(languageType)) { + return jobRequest + } + + val creator = LabelUtil.getUserCreatorLabel(labellist).getCreator + if ( + StringUtils.isNotBlank( + EntranceConfiguration.DOCTOR_SENSITIVE_SQL_CHECK_CREATOR + ) && (!EntranceConfiguration.DOCTOR_SENSITIVE_SQL_CHECK_CREATOR.contains(creator)) + ) { + return jobRequest + } + + val executeUserDepartmentId = EntranceUtils.getUserDepartmentId(jobRequest.getExecuteUser) + val submitUserDepartmentId = EntranceUtils.getUserDepartmentId(jobRequest.getSubmitUser) + if ( + (StringUtils.isNotBlank( + executeUserDepartmentId + ) && EntranceConfiguration.DOCTOR_SENSITIVE_SQL_CHECK_DEPARTMENT.contains( + executeUserDepartmentId + )) || ( + StringUtils.isNotBlank( + submitUserDepartmentId + ) && EntranceConfiguration.DOCTOR_SENSITIVE_SQL_CHECK_DEPARTMENT.contains( + submitUserDepartmentId + ) + ) + ) { + val (result, reason) = + EntranceUtils.sensitiveSqlCheck( + jobRequest.getExecutionCode, + languageType, + engineType, + jobRequest.getExecuteUser, + logAppender + ) + if (result) { + throw CodeCheckException(20054, "当前操作涉及明文信息读取,禁止执行该操作, 原因:" + reason) + } + } + jobRequest + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/StorePathEntranceInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/StorePathEntranceInterceptor.scala deleted file mode 100644 index 
d05dce4bc47..00000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/StorePathEntranceInterceptor.scala +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.interceptor.impl - -import org.apache.linkis.common.utils.Logging -import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} -import org.apache.linkis.entrance.interceptor.EntranceInterceptor -import org.apache.linkis.governance.common.conf.GovernanceCommonConf -import org.apache.linkis.governance.common.entity.job.JobRequest -import org.apache.linkis.manager.label.utils.LabelUtil -import org.apache.linkis.protocol.utils.TaskUtils -import org.apache.linkis.server.BDPJettyServerHelper - -import org.apache.commons.lang3.time.DateFormatUtils - -import java.util - -import scala.collection.JavaConverters.{asScalaBufferConverter, mapAsScalaMapConverter} - -class StorePathEntranceInterceptor extends EntranceInterceptor with Logging { - - /** - * The apply function is to supplement the information of the incoming parameter task, making the - * content of this task more complete. 
Additional information includes: database information - * supplement, custom variable substitution, code check, limit limit, etc. - * apply函数是对传入参数task进行信息的补充,使得这个task的内容更加完整。 补充的信息包括: 数据库信息补充、自定义变量替换、代码检查、limit限制等 - * - * @param jobReq - * @return - */ - override def apply(jobReq: JobRequest, logAppender: java.lang.StringBuilder): JobRequest = { - var parentPath: String = GovernanceCommonConf.RESULT_SET_STORE_PATH.getValue - if (!parentPath.endsWith("/")) parentPath += "/" - parentPath += jobReq.getExecuteUser - if (!parentPath.endsWith("/")) parentPath += "/linkis/" - else parentPath += "linkis/" - val userCreator = LabelUtil.getUserCreator(jobReq.getLabels) - if (null == userCreator) { - val labelJson = - BDPJettyServerHelper.gson.toJson(jobReq.getLabels.asScala.filter(_ != null).map(_.toString)) - throw new EntranceErrorException( - EntranceErrorCode.LABEL_PARAMS_INVALID.getErrCode, - s"UserCreator cannot be empty in labels : ${labelJson} of job with id : ${jobReq.getId}" - ) - } - // multi linkis cluster should not use same root folder , in which case result file may be overwrite - parentPath += DateFormatUtils.format(System.currentTimeMillis, "yyyy-MM-dd/HHmmss") + "/" + - userCreator._2 + "/" + jobReq.getId - val paramsMap = if (null != jobReq.getParams) { - jobReq.getParams - } else { - new util.HashMap[String, AnyRef]() - } - - var runtimeMap = TaskUtils.getRuntimeMap(paramsMap) - if (null == runtimeMap || runtimeMap.isEmpty) { - runtimeMap = new util.HashMap[String, AnyRef]() - } - runtimeMap.put(GovernanceCommonConf.RESULT_SET_STORE_PATH.key, parentPath) - TaskUtils.addRuntimeMap(paramsMap, runtimeMap) - val params = new util.HashMap[String, AnyRef]() - paramsMap.asScala.foreach(kv => params.put(kv._1, kv._2)) - jobReq.setResultLocation(parentPath) - jobReq.setParams(params) - jobReq - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala new file mode 100644 index 00000000000..6accd30bd59 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.interceptor.impl + +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.interceptor.EntranceInterceptor +import org.apache.linkis.governance.common.entity.job.JobRequest + +import java.lang + +class TemplateConfInterceptor extends EntranceInterceptor { + + override def apply(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + if (EntranceConfiguration.TEMPLATE_CONF_SWITCH.getValue) { + TemplateConfUtils.dealWithTemplateConf(jobRequest, logAppender) + } else { + jobRequest + } + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala new file mode 100644 index 00000000000..d7c95c964a3 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala @@ -0,0 +1,388 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.interceptor.impl + +import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.exception.LinkisCommonErrorException +import org.apache.linkis.common.log.LogUtils +import org.apache.linkis.common.utils.{CodeAndRunTypeUtils, Logging, Utils} +import org.apache.linkis.common.utils.CodeAndRunTypeUtils.LANGUAGE_TYPE_AI_SQL +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.utils.EntranceUtils +import org.apache.linkis.governance.common.entity.TemplateConfKey +import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.protocol.conf.{TemplateConfRequest, TemplateConfResponse} +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.conf.LabelCommonConfig +import org.apache.linkis.manager.label.constant.LabelKeyConstant +import org.apache.linkis.manager.label.entity.engine.{ + EngineType, + EngineTypeLabel, + FixedEngineConnLabel +} +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel +import org.apache.linkis.manager.label.utils.LabelUtil +import org.apache.linkis.protocol.utils.TaskUtils +import org.apache.linkis.rpc.Sender +import org.apache.linkis.server.BDPJettyServerHelper + +import org.apache.commons.lang3.StringUtils + +import java.{lang, util} +import java.util.concurrent.TimeUnit + +import scala.collection.JavaConverters._ +import scala.util.matching.{Regex, UnanchoredRegex} + +import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} + +object TemplateConfUtils extends Logging { + + val confTemplateNameKey = "ec.resource.name" + val confFixedEngineConnLabelKey = "ec.fixed.sessionId" + + /** + * 按模板uuid缓存模板配置 + */ + private val templateCache: LoadingCache[String, util.List[TemplateConfKey]] = CacheBuilder + .newBuilder() + .maximumSize(1000) + .expireAfterWrite(5, TimeUnit.MINUTES) + .build(new 
CacheLoader[String, util.List[TemplateConfKey]]() { + + override def load(templateUuid: String): util.List[TemplateConfKey] = { + var templateList = Utils.tryAndWarn { + val sender: Sender = Sender + .getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) + + logger.info(s"load template configuration data templateUuid:$templateUuid") + val res = sender.ask(new TemplateConfRequest(templateUuid)) match { + case response: TemplateConfResponse => + logger + .debug(s"${response.getList()}") + response.getList + case _ => + logger + .warn(s"load template configuration data templateUuid:$templateUuid loading failed") + new util.ArrayList[TemplateConfKey](0) + } + res + } + if (templateList.size() == 0) { + logger.warn(s"template configuration data loading failed, please check warn log") + } + templateList + } + + }) + + /** + * 按模板名称缓存模板配置 + */ + private val templateCacheName: LoadingCache[String, util.List[TemplateConfKey]] = CacheBuilder + .newBuilder() + .maximumSize(1000) + .expireAfterWrite(5, TimeUnit.MINUTES) + .build(new CacheLoader[String, util.List[TemplateConfKey]]() { + + override def load(templateName: String): util.List[TemplateConfKey] = { + var templateList = Utils.tryAndWarn { + val sender: Sender = Sender + .getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) + + logger.info(s"load template configuration data templateName:$templateName") + val res = sender.ask(new TemplateConfRequest(null, templateName)) match { + case response: TemplateConfResponse => + logger + .debug(s"${response.getList()}") + response.getList + case _ => + logger + .warn(s"load template configuration data templateName:$templateName loading failed") + new util.ArrayList[TemplateConfKey](0) + } + res + } + + if (templateList.size() == 0) { + logger.warn(s"template configuration data loading failed, please check warn log") + } + templateList + } + + }) + + /** + * Get user-defined template conf name value + * + * @param 
code + * :code + * @param codeType + * :sql,hql,scala + * @return + * String the last one of template conf name + */ + def getCustomTemplateConfName( + jobRequest: JobRequest, + codeType: String, + logAppender: lang.StringBuilder + ): String = { + var code = jobRequest.getExecutionCode + var templateConfName = ""; + + var varString: String = null + var errString: String = null + var fixECString: String = null + + val languageType = CodeAndRunTypeUtils.getLanguageTypeByCodeType(codeType) + + languageType match { + case CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL => + varString = s"""\\s*---@set ${confTemplateNameKey}=\\s*.+\\s*""" + fixECString = s"""\\s*---@set\\s+${confFixedEngineConnLabelKey}\\s*=\\s*([^;]+)(?:\\s*;)?""" + errString = """\s*---@.*""" + case CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON | CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL => + varString = s"""\\s*##@set ${confTemplateNameKey}=\\s*.+\\s*""" + fixECString = s"""\\s*##@set\\s+${confFixedEngineConnLabelKey}\\s*=\\s*([^;]+)(?:\\s*;)?""" + errString = """\s*##@""" + case CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA => + varString = s"""\\s*///@set ${confTemplateNameKey}=\\s*.+\\s*""" + fixECString = s"""\\s*///@set\\s+${confFixedEngineConnLabelKey}\\s*=\\s*([^;]+)(?:\\s*;)?""" + errString = """\s*///@.+""" + case _ => + return templateConfName + } + + val customRegex = varString.r.unanchored + val fixECRegex: UnanchoredRegex = fixECString.r.unanchored + val errRegex = errString.r.unanchored + var codeRes = code.replaceAll("\r\n", "\n") + + // 匹配任意行,只能是单独的行 + if (codeRes.contains(confTemplateNameKey) || codeRes.contains(confFixedEngineConnLabelKey)) { + val res = codeRes.split("\n") + // 用于标识,匹配到就退出 + var matchFlag = false + res.foreach(str => { + if (matchFlag) { + return templateConfName + } + str match { + case customRegex() => + val clearStr = if (str.endsWith(";")) str.substring(0, str.length - 1) else str + val res: Array[String] = clearStr.split("=") + if (res != null && res.length == 2) { + 
templateConfName = res(1).trim + logger.info(s"get template conf name $templateConfName") + } else { + if (res.length > 2) { + throw new LinkisCommonErrorException( + 20044, + s"$str template conf name var defined uncorrectly" + ) + } else { + throw new LinkisCommonErrorException( + 20045, + s"template conf name var was defined uncorrectly:$str" + ) + } + } + matchFlag = true + case fixECRegex(sessionId) => + // deal with fixedEngineConn configuration, add fixedEngineConn label if setting @set ec.fixed.sessionId=xxx + if (StringUtils.isNotBlank(sessionId)) { + val fixedEngineConnLabel = + LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel( + classOf[FixedEngineConnLabel] + ) + fixedEngineConnLabel.setSessionId(sessionId) + jobRequest.getLabels.add(fixedEngineConnLabel) + logger.info( + s"The task ${jobRequest.getId} is set to fixed engine conn, labelValue: ${sessionId}" + ) + logAppender.append( + s"The task ${jobRequest.getId} is set to fixed engine conn, labelValue: ${sessionId}" + ) + } else { + logger.info(s"The task ${jobRequest.getId} not set fixed engine conn") + } + matchFlag = true + case errRegex() => + logger.warn( + s"The template conf name var definition is incorrect:$str,if it is not used, it will not run the error, but it is recommended to use the correct specification to define" + ) + case _ => + } + }) + } + templateConfName + } + + def dealWithTemplateConf(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + jobRequest match { + case requestPersistTask: JobRequest => + val params = requestPersistTask.getParams + val startMap = TaskUtils.getStartupMap(params) + val runtimeMap: util.Map[String, AnyRef] = TaskUtils.getRuntimeMap(params) + + var templateConflist: util.List[TemplateConfKey] = new util.ArrayList[TemplateConfKey]() + var templateName: String = "" + // only for Creator:IDE, try to get template conf name from code string. 
eg:---@set ec.resource.name=xxxx + val codeType = LabelUtil.getCodeType(jobRequest.getLabels) + val (user, creator) = LabelUtil.getUserCreator(jobRequest.getLabels) + if (EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue.equals(creator)) { + templateName = getCustomTemplateConfName(jobRequest, codeType, logAppender) + if (StringUtils.isNotBlank(templateName)) { + logAppender.append( + LogUtils + .generateInfo(s"Try to execute task with template: $templateName in script.\n") + ) + } + } + + // 处理runtime参数中的模板名称,用于失败任务重试的时候使用模板参数重试 + var runtimeTemplateFlag = false + if ( + EntranceConfiguration.SUPPORT_TEMPLATE_CONF_RETRY_ENABLE.getValue && StringUtils + .isBlank(templateName) + ) { + templateName = + runtimeMap.getOrDefault(LabelKeyConstant.TEMPLATE_CONF_NAME_KEY, "").toString + if (StringUtils.isNotBlank(templateName)) { + runtimeTemplateFlag = true + logAppender.append( + LogUtils.generateInfo( + s"Try to execute task with template: $templateName in runtime params.\n" + ) + ) + } + } + + // code template name > start params template uuid + if (StringUtils.isBlank(templateName)) { + logger.debug("jobRequest startMap param template name is empty") + + logger.info("jobRequest startMap params :{} ", startMap) + val templateUuid = startMap.getOrDefault(LabelKeyConstant.TEMPLATE_CONF_KEY, "").toString + + if (StringUtils.isBlank(templateUuid)) { + logger.debug("jobRequest startMap param template id is empty") + } else { + logger.info("try to get template conf list with template uid:{} ", templateUuid) + logAppender.append( + LogUtils + .generateInfo(s"Try to get template conf data with template uid:$templateUuid\n") + ) + templateConflist = templateCache.get(templateUuid) + if (templateConflist == null || templateConflist.size() == 0) { + logAppender.append( + LogUtils.generateInfo( + s"Can not get any template conf data with template uid:$templateUuid\n" + ) + ) + } else { + templateName = templateConflist.get(0).getTemplateName + } + } + } else { + 
logger.info("Try to get template conf list with template name:[{}]", templateName) + logAppender.append( + LogUtils + .generateInfo(s"Try to get template conf data with template name:[$templateName]\n") + ) + val cacheList: util.List[TemplateConfKey] = templateCacheName.get(templateName) + templateConflist.addAll(cacheList) + if (templateConflist == null || templateConflist.size() == 0) { + logAppender.append( + LogUtils.generateInfo( + s"Can not get any template conf data with template name:$templateName\n" + ) + ) + } else { + // to remove metedata start param + TaskUtils.clearStartupMap(params) + + if (EntranceConfiguration.TEMPLATE_CONF_ADD_ONCE_LABEL_ENABLE.getValue) { + val onceLabel = + LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel( + classOf[ExecuteOnceLabel] + ) + logger.info("Add once label for task id:{}", requestPersistTask.getId.toString) + requestPersistTask.getLabels.add(onceLabel) + } + } + } + + // 针对aisql处理模板参数 + val isAisql = LANGUAGE_TYPE_AI_SQL.equals(codeType) + if ( + isAisql && runtimeTemplateFlag && templateConflist != null && templateConflist + .size() > 0 + ) { + logger.info("aisql deal with template in runtime params.") + logAppender.append( + LogUtils.generateInfo( + s"If task execution fails, the template $templateName configuration parameters will be used to rerun the task\n" + ) + ) + val keyList = new util.HashMap[String, AnyRef]() + templateConflist.asScala.foreach(ele => { + keyList.put(ele.getKey, ele.getConfigValue) + }) + val confRuntimeMap = new util.HashMap[String, AnyRef]() + confRuntimeMap.put(LabelKeyConstant.TEMPLATE_CONF_NAME_KEY, keyList) + // 缓存配置到runtime + TaskUtils.addRuntimeMap(params, confRuntimeMap) + // 如果是aisql则不需要手动处理模板参数 + templateConflist.clear() + } + + if (templateConflist != null && templateConflist.size() > 0) { + val keyList = new util.HashMap[String, AnyRef]() + templateConflist.asScala.foreach(ele => { + val key = ele.getKey + val oldValue = startMap.get(key) + if (oldValue != null && 
StringUtils.isNotBlank(oldValue.toString)) { + logger.info(s"key:$key value:$oldValue not empty, skip to deal") + } else { + val newValue = ele.getConfigValue + logger.info(s"key:$key value:$newValue will add to startMap params") + if (TaskUtils.isWithDebugInfo(params)) { + logAppender.append(LogUtils.generateInfo(s"add $key=$newValue\n")) + } + keyList.put(key, newValue) + } + }) + if (keyList.size() > 0) { + logger.info(s"use template conf for templateName: ${templateName}") + keyList.put(confTemplateNameKey, templateName) + logAppender.append( + LogUtils + .generateInfo(s"use template conf with templateName: ${templateName} \n") + ) + TaskUtils.addStartupMap(params, keyList) + } + } else if (!isAisql) { + EntranceUtils.dealsparkDynamicConf(jobRequest, logAppender, jobRequest.getParams) + } + case _ => + } + jobRequest + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TenantLabelSetUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TenantLabelSetUtils.scala index 1deee62a729..49f44edbae6 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TenantLabelSetUtils.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TenantLabelSetUtils.scala @@ -95,6 +95,11 @@ object TenantLabelSetUtils extends Logging { "*-" + LabelUtil.getUserCreatorLabel(jobRequest.getLabels).getCreator.toLowerCase() ) } + if (StringUtils.isBlank(tenant)) { + tenant = userCreatorTenantCache.get( + LabelUtil.getUserCreatorLabel(jobRequest.getLabels).getUser.toLowerCase() + "-*" + ) + } logger.info("get cache tenant:" + tenant + ",jobRequest:" + jobRequest.getId) // Add cached data if it is not empty if (StringUtils.isNotBlank(tenant)) { diff --git 
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala index 573c134493c..653e9ad78bb 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala @@ -67,7 +67,7 @@ object UserCreatorIPCheckUtils extends Logging { def checkUserIp(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { // Get IP address - val jobIp = jobRequest.getSource.get(TaskConstant.REQUEST_IP) + val jobIp = jobRequest.getSource.getOrDefault(TaskConstant.REQUEST_IP, "") logger.debug(s"start to checkTenantLabel $jobIp") if (StringUtils.isNotBlank(jobIp)) { jobRequest match { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala index 0487a238cfa..033fe7aab25 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala @@ -24,9 +24,13 @@ import org.apache.linkis.entrance.interceptor.EntranceInterceptor import org.apache.linkis.entrance.interceptor.exception.VarSubstitutionException import org.apache.linkis.governance.common.entity.job.JobRequest import org.apache.linkis.manager.label.utils.LabelUtil +import org.apache.linkis.protocol.utils.TaskUtils +import 
org.apache.linkis.server.toScalaMap import org.apache.commons.lang3.exception.ExceptionUtils +import java.util + /** * Description: For variable substitution(用于变量替换) */ @@ -41,10 +45,40 @@ class VarSubstitutionInterceptor extends EntranceInterceptor { LogUtils.generateInfo("Program is substituting variables for you") + "\n" ) val codeType = LabelUtil.getCodeType(jobRequest.getLabels) - jobRequest.setExecutionCode(CustomVariableUtils.replaceCustomVar(jobRequest, codeType)) + val realCode = CustomVariableUtils.replaceCustomVar(jobRequest, codeType, logAppender) + jobRequest.setExecutionCode(realCode) logAppender.append( LogUtils.generateInfo("Variables substitution ended successfully") + "\n" ) + logAppender.append(LogUtils.generateInfo("Job variables is") + "\n") + logAppender.append( + "************************************Variable************************************" + "\n" + ) + val variableMap = TaskUtils + .getVariableMap(jobRequest.getParams) + .asInstanceOf[util.HashMap[String, String]] + variableMap.foreach { case (key, value) => + logAppender.append(s"$key=$value\n") + } + logAppender.append("\n"); + logAppender.append( + "************************************Variable************************************" + "\n" + ) + // print code after variables substitution + logAppender.append( + LogUtils.generateInfo( + "You have submitted a new job, script code (after variable substitution) is" + ) + "\n" + ); + logAppender.append( + "************************************SCRIPT CODE************************************" + "\n" + ) + logAppender.append(realCode); + logAppender.append("\n"); + logAppender.append( + "************************************SCRIPT CODE************************************" + "\n" + ); + jobRequest } { case e: VarSubstitutionException => diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/Cache.scala 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/Cache.scala index 44474ee0ca6..3c5173a807f 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/Cache.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/Cache.scala @@ -18,7 +18,12 @@ package org.apache.linkis.entrance.log class Cache(maxCapacity: Int) { - val cachedLogs: LoopArray[String] = LoopArray[String](maxCapacity) + var cachedLogs: LoopArray[String] = LoopArray[String](maxCapacity) + + def clearCachedLogs(): Unit = { + this.cachedLogs = null + } + } object Cache { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala index 483cf9ab43a..406d43e5bc9 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala @@ -19,7 +19,11 @@ package org.apache.linkis.entrance.log import org.apache.linkis.common.io.{Fs, FsPath} import org.apache.linkis.common.utils.Utils +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.exception.LogReadFailedException import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.fs.FileSystem +import org.apache.linkis.storage.utils.StorageUtils import java.io.{InputStream, IOException} import java.util @@ -36,13 +40,26 @@ class CacheLogReader(logPath: String, charset: String, sharedCache: Cache, user: var closed = false private def createInputStream: InputStream = { + if (!logPath.contains(user)) { + throw new LogReadFailedException( + s"${user} does not have permission to read the path $logPath" + ) + } + val fsPath = 
new FsPath(logPath) if (fileSystem == null) lock synchronized { if (fileSystem == null) { - fileSystem = FSFactory.getFsByProxyUser(new FsPath(logPath), user) + + fileSystem = + if (StorageUtils.isHDFSPath(fsPath) && EntranceConfiguration.ENABLE_HDFS_JVM_USER) { + FSFactory.getFs(new FsPath(logPath)).asInstanceOf[FileSystem] + } else { + FSFactory.getFsByProxyUser(new FsPath(logPath), user).asInstanceOf[FileSystem] + } + fileSystem.init(new util.HashMap[String, String]()) } } - val inputStream: InputStream = fileSystem.read(new FsPath(logPath)) + val inputStream: InputStream = fileSystem.read(fsPath) inputStream } @@ -51,21 +68,39 @@ class CacheLogReader(logPath: String, charset: String, sharedCache: Cache, user: } override protected def readLog(deal: String => Unit, fromLine: Int, size: Int): Int = { - if (!sharedCache.cachedLogs.nonEmpty) return super.readLog(deal, fromLine, size) + if (sharedCache.cachedLogs == null || sharedCache.cachedLogs.isEmpty) { + return super.readLog(deal, fromLine, size) + } val min = sharedCache.cachedLogs.min val max = sharedCache.cachedLogs.max + + val fakeClearEleNums = sharedCache.cachedLogs.fakeClearEleNums + if (fromLine > max) return 0 - val from = fromLine - val to = if (fromLine >= min) { - if (size >= 0 && max >= fromLine + size) fromLine + size else max + 1 - } else { + + var from = fromLine + val end = + if (size >= 0 && max >= fromLine + size) { + fromLine + size + } else { + max + 1 + } + + var readNums = 0 + // The log may have been refreshed to the log file regularly and cannot be determined based on min. + if (fromLine < fakeClearEleNums) { // If you are getting it from a file, you don't need to read the cached data again. In this case, you can guarantee that the log will not be missing. 
- val read = super.readLog(deal, fromLine, size) - return read - } + readNums = super.readLog(deal, fromLine, size) + if ((fromLine + size) < min) { + return readNums + } else { + from = from + readNums + } + } else {} + + (from until end) map sharedCache.cachedLogs.get foreach deal + end - from + readNums - (from until to) map sharedCache.cachedLogs.get foreach deal - to - fromLine } @throws[IOException] diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala index 9028c469ab6..8f1cea1b183 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala @@ -33,20 +33,26 @@ class CacheLogWriter(logPath: String, charset: String, sharedCache: Cache, user: def getCache: Option[Cache] = Some(sharedCache) private def cache(msg: String): Unit = { + if (sharedCache.cachedLogs == null) { + return + } this synchronized { - val removed = sharedCache.cachedLogs.add(msg) + val isNextOneEmpty = sharedCache.cachedLogs.isNextOneEmpty val currentTime = new Date(System.currentTimeMillis()) - if (removed != null || currentTime.after(pushTime)) { + + if (isNextOneEmpty == false || currentTime.after(pushTime)) { val logs = sharedCache.cachedLogs.toList val sb = new StringBuilder - if (removed != null) sb.append(removed).append("\n") logs.filter(_ != null).foreach(log => sb.append(log).append("\n")) + // need append latest msg before clear + sb.append(msg) sharedCache.cachedLogs.fakeClear() super.write(sb.toString()) pushTime.setTime( currentTime.getTime + EntranceConfiguration.LOG_PUSH_INTERVAL_TIME.getValue ) } + sharedCache.cachedLogs.add(msg) } } @@ -63,10 +69,12 @@ class CacheLogWriter(logPath: String, charset: String, 
sharedCache: Cache, user: override def flush(): Unit = { val sb = new StringBuilder - sharedCache.cachedLogs.toList - .filter(StringUtils.isNotEmpty) - .foreach(sb.append(_).append("\n")) - sharedCache.cachedLogs.clear() + if (sharedCache.cachedLogs != null) { + sharedCache.cachedLogs.toList + .filter(StringUtils.isNotEmpty) + .foreach(sb.append(_).append("\n")) + sharedCache.cachedLogs.clear() + } super.write(sb.toString()) super.flush() } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala index 54914b60021..4b082342ceb 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala @@ -18,6 +18,12 @@ package org.apache.linkis.entrance.log import org.apache.linkis.errorcode.client.handler.LinkisErrorCodeHandler +import org.apache.linkis.errorcode.client.manager.LinkisErrorCodeManager +import org.apache.linkis.errorcode.common.LinkisErrorCode + +import java.util + +import scala.collection.JavaConverters.asScalaBufferConverter abstract class ErrorCodeManager { @@ -35,6 +41,22 @@ abstract class ErrorCodeManager { None } + def errorMatchAndGetContent(log: String): Option[(String, String, String)] = { + getErrorCodes.foreach(e => + if (e.regex.findFirstIn(log).isDefined) { + val matched = e.regex.unapplySeq(log) + if (matched.nonEmpty) { + return Some( + e.code, + e.message.format(matched.get: _*), + e.regex.findFirstIn(log).getOrElse("") + ) + } else Some(e.code, e.message, "") + } + ) + None + } + } /** @@ -44,7 +66,24 @@ object FlexibleErrorCodeManager extends ErrorCodeManager { private val errorCodeHandler = LinkisErrorCodeHandler.getInstance() - override def getErrorCodes: Array[ErrorCode] = 
Array.empty + private val linkisErrorCodeManager = LinkisErrorCodeManager.getInstance + + override def getErrorCodes: Array[ErrorCode] = { + val errorCodes: util.List[LinkisErrorCode] = linkisErrorCodeManager.getLinkisErrorCodes + if (errorCodes == null) { + Array.empty + } else { + errorCodes.asScala + .map(linkisErrorCode => + ErrorCode( + linkisErrorCode.getErrorRegex, + linkisErrorCode.getErrorCode, + linkisErrorCode.getErrorDesc + ) + ) + .toArray + } + } override def errorMatch(log: String): Option[(String, String)] = { val errorCodes = errorCodeHandler.handle(log) diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala index 24633dfbb20..ff04640afa0 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala @@ -37,11 +37,15 @@ import java.util class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, user: String) extends LogWriter(charset) { - if (StringUtils.isBlank(logPath)) + if (StringUtils.isBlank(logPath)) { throw new EntranceErrorException(LOGPATH_NOT_NULL.getErrorCode, LOGPATH_NOT_NULL.getErrorDesc) + } - protected var fileSystem = + protected var fileSystem = if (EntranceConfiguration.ENABLE_HDFS_JVM_USER) { + FSFactory.getFs(new FsPath(logPath)).asInstanceOf[FileSystem] + } else { FSFactory.getFsByProxyUser(new FsPath(logPath), user).asInstanceOf[FileSystem] + } override protected var outputStream: OutputStream = null @@ -55,7 +59,12 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u private def init(): Unit = { fileSystem.init(new util.HashMap[String, String]()) - 
FileSystemUtils.createNewFileWithFileSystem(fileSystem, new FsPath(logPath), user, true) + FileSystemUtils.createNewFileAndSetOwnerWithFileSystem( + fileSystem, + new FsPath(logPath), + user, + true + ) } @throws[IOException] @@ -91,13 +100,15 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u def getCache: Option[Cache] = Some(sharedCache) private def cache(msg: String): Unit = { + if (sharedCache.cachedLogs == null) { + return + } WRITE_LOCKER synchronized { - val removed = sharedCache.cachedLogs.add(msg) + val isNextOneEmpty = sharedCache.cachedLogs.isNextOneEmpty val currentTime = new Date(System.currentTimeMillis()) - if (removed != null || currentTime.after(pushTime)) { + if (isNextOneEmpty == false || currentTime.after(pushTime)) { val logs = sharedCache.cachedLogs.toList val sb = new StringBuilder - if (removed != null) sb.append(removed).append("\n") logs.filter(_ != null).foreach(log => sb.append(log).append("\n")) sharedCache.cachedLogs.fakeClear() writeToFile(sb.toString()) @@ -105,17 +116,17 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u currentTime.getTime + EntranceConfiguration.LOG_PUSH_INTERVAL_TIME.getValue ) } + sharedCache.cachedLogs.add(msg) } } private def writeToFile(msg: String): Unit = WRITE_LOCKER synchronized { - val log = - if (!firstWrite) "\n" + msg - else { - logger.info(s"$toString write first one line log") - firstWrite = false - msg - } + val log = msg + if (firstWrite) { + logger.info(s"$toString write first one line log") + firstWrite = false + msg + } Utils.tryAndWarnMsg { getOutputStream.write(log.getBytes(charset)) }(s"$toString error when write query log to outputStream.") @@ -133,10 +144,12 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u override def flush(): Unit = { val sb = new StringBuilder - sharedCache.cachedLogs.toList - .filter(_ != null) - .foreach(sb.append(_).append("\n")) - sharedCache.cachedLogs.clear() + if 
(sharedCache.cachedLogs != null) { + sharedCache.cachedLogs.toList + .filter(_ != null) + .foreach(sb.append(_).append("\n")) + sharedCache.cachedLogs.clear() + } writeToFile(sb.toString()) } @@ -146,6 +159,7 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u fileSystem.close() fileSystem = null }(s"$toString Error encounters when closing fileSystem") + sharedCache.clearCachedLogs() } override def toString: String = logPath diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala index 626a643a0b0..19f4c5c6ada 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala @@ -17,6 +17,7 @@ package org.apache.linkis.entrance.log +import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.EntranceContext import org.apache.linkis.entrance.job.EntranceExecutionJob @@ -59,10 +60,19 @@ abstract class LogManager extends LogListener with Logging { } } } - entranceExecutionJob.getLogWriter.foreach(logWriter => logWriter.write(log)) - errorCodeManager.foreach(_.errorMatch(log).foreach { case (code, errorMsg) => - errorCodeListener.foreach(_.onErrorCodeCreated(job, code, errorMsg)) + var writeLog = log + errorCodeManager.foreach(_.errorMatchAndGetContent(log).foreach { + case (code, errorMsg, targetMsg) => + if (!targetMsg.contains(LogUtils.ERROR_STR) && log.contains(LogUtils.ERROR_STR)) { + writeLog = LogUtils.generateERROR( + s"error code: $code, errorMsg: $errorMsg, errorLine: $targetMsg \n" + log + ) + } + errorCodeListener.foreach(_.onErrorCodeCreated(job, code, errorMsg)) + case _ => }) + 
entranceExecutionJob.getLogWriter.foreach(logWriter => logWriter.write(writeLog)) + case _ => } } { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala index 1d5f0cbda9f..da7f058fd88 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogReader.scala @@ -115,8 +115,11 @@ abstract class LogReader(charset: String) extends Closeable with Logging { } protected def readLog(deal: String => Unit, fromLine: Int, size: Int = 100): Int = { - val from = if (fromLine < 0) 0 else fromLine - var line, read = 0 + + // fromline param with begin 1 ,if set 0 missing first line + val from = if (fromLine < 1) 1 else fromLine + var line = 1 + var read = 0 val inputStream = getInputStream val lineIterator = IOUtils.lineIterator(inputStream, charset) Utils.tryFinally(while (lineIterator.hasNext && (read < size || size < 0)) { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala index 155d8c7bd58..ff0dfbba841 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala @@ -23,21 +23,33 @@ class LoopArray[T](maxCapacity: Int) { def this() = this(32) + // realSize 游标之前的数据 已经被重写覆盖了 + // The data before realSize cursor has been overwritten by rewriting protected[this] var realSize = 0 - private var flag = 0 + + // the loop begin indx + private var front = 0 + + // the loop last index + // 尾部 下一个存储的游标 private 
var tail = 0 + private var clearEleNums = 0 + def add(event: T): T = { var t = null.asInstanceOf[T] eventQueue synchronized { - val index = (tail + 1) % maxCapacity - if (index == flag) { - flag = (flag + 1) % maxCapacity + val nextIndex = (tail + 1) % maxCapacity + // 首尾相遇 第一次循环队列满了,后续所有add动作 nextIndex和front都是相等的 front指针不断往前循环移动 + // When the first and last ends meet, the first circular queue is full, and all subsequent add actions nextIndex and front are equal. + // The front pointer continues to move forward in a circular motion. + if (nextIndex == front) { + front = (front + 1) % maxCapacity realSize += 1 } t = eventQueue(tail).asInstanceOf[T] eventQueue(tail) = event - tail = index + tail = nextIndex } t } @@ -51,18 +63,19 @@ class LoopArray[T](maxCapacity: Int) { } else if (index > _max) { throw new IllegalArgumentException("The index " + index + " must be less than " + _max) } - val _index = (flag + (index - realSize)) % maxCapacity + val _index = (front + (index - realSize + maxCapacity - 1)) % maxCapacity eventQueue(_index).asInstanceOf[T] } def clear(): Unit = eventQueue synchronized { - flag = 0 + front = 0 tail = 0 realSize = 0 (0 until maxCapacity).foreach(eventQueue(_) = null) } def fakeClear(): Unit = eventQueue synchronized { + clearEleNums = clearEleNums + size (0 until maxCapacity).foreach(eventQueue(_) = null) } @@ -73,16 +86,34 @@ class LoopArray[T](maxCapacity: Int) { if (_size == 0) { _size = 1 } - realSize + _size - 1 + realSize + _size } - private def filledSize = if (tail >= flag) tail - flag else tail + maxCapacity - flag + def fakeClearEleNums: Int = clearEleNums + + private def filledSize = { + if (tail == front && tail == 0) { + 0 + } else if (tail > front) { + tail - front + } else { + tail + maxCapacity - front + } + } def size: Int = filledSize def isFull: Boolean = filledSize == maxCapacity - 1 - def nonEmpty: Boolean = size > 0 + // If it is not empty, it means that the loop queue is full this round. 
+ // 不为空 说明本轮 循环队列满了 + def isNextOneEmpty(): Boolean = { + + eventQueue(tail).asInstanceOf[T] == null + + } + + def isEmpty: Boolean = size == 0 def toList: List[T] = toIndexedSeq.toList diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala index 4b9b4570f14..1a2056be250 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala @@ -20,19 +20,16 @@ package org.apache.linkis.entrance.orchestrator.plugin import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.scheduler.EntranceGroupFactory +import org.apache.linkis.entrance.utils.EntranceUtils import org.apache.linkis.governance.common.protocol.conf.{ RequestQueryEngineConfigWithGlobalConfig, ResponseQueryConfig } -import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext -import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} import org.apache.linkis.orchestrator.plugin.UserParallelOrchestratorPlugin import org.apache.linkis.rpc.Sender -import org.apache.linkis.server.BDPJettyServerHelper - -import org.apache.commons.lang3.StringUtils import java.util import java.util.concurrent.TimeUnit @@ -43,10 +40,6 @@ import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} 
class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlugin with Logging { - private val SPLIT = "," - - private val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory - private def getDefaultMaxRuningNum: Int = { EntranceConfiguration.WDS_LINKIS_INSTANCE.getHotValue() } @@ -57,12 +50,12 @@ class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu private val configCache: LoadingCache[String, Integer] = CacheBuilder .newBuilder() .maximumSize(1000) - .expireAfterAccess(1, TimeUnit.HOURS) + .expireAfterAccess(EntranceConfiguration.USER_PARALLEL_REFLESH_TIME.getValue, TimeUnit.MINUTES) .expireAfterWrite(EntranceConfiguration.USER_PARALLEL_REFLESH_TIME.getValue, TimeUnit.MINUTES) .build(new CacheLoader[String, Integer]() { override def load(key: String): Integer = { - val (userCreatorLabel, engineTypeLabel) = fromKeyGetLabels(key) + val (userCreatorLabel, engineTypeLabel) = EntranceUtils.fromKeyGetLabels(key) val keyAndValue = Utils.tryAndWarnMsg { sender .ask(RequestQueryEngineConfigWithGlobalConfig(userCreatorLabel, engineTypeLabel)) @@ -75,13 +68,12 @@ class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu null == keyAndValue || !keyAndValue .containsKey(EntranceConfiguration.WDS_LINKIS_INSTANCE.key) ) { - logger.error( - s"cannot found user configuration key:${EntranceConfiguration.WDS_LINKIS_INSTANCE.key}," + - s"will use default value ${EntranceConfiguration.WDS_LINKIS_INSTANCE.getHotValue()}。All config map: ${BDPJettyServerHelper.gson - .toJson(keyAndValue)}" + logger.warn( + s"cannot found user configuration key:${EntranceConfiguration.WDS_LINKIS_INSTANCE.key}, will use default value " ) } - val maxRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue, true) + val maxRunningJobs = EntranceGroupFactory.getUserMaxRunningJobs(keyAndValue) + logger.info(s"$key load orchestrator user maxRunningJobs=$maxRunningJobs") maxRunningJobs } @@ -102,27 +94,7 @@ 
class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu if (null == userCreatorLabel || null == engineTypeLabel) { return getDefaultMaxRuningNum } - configCache.get(getKey(userCreatorLabel, engineTypeLabel)) - } - - private def getKey( - userCreatorLabel: UserCreatorLabel, - engineTypeLabel: EngineTypeLabel - ): String = { - userCreatorLabel.getStringValue + SPLIT + engineTypeLabel.getStringValue - } - - private def fromKeyGetLabels(key: String): (UserCreatorLabel, EngineTypeLabel) = { - if (StringUtils.isBlank(key)) (null, null) - else { - val labelStringValues = key.split(SPLIT) - if (labelStringValues.length < 2) return (null, null) - val userCreatorLabel = labelFactory - .createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY, labelStringValues(0)) - val engineTypeLabel = labelFactory - .createLabel[EngineTypeLabel](LabelKeyConstant.ENGINE_TYPE_KEY, labelStringValues(1)) - (userCreatorLabel, engineTypeLabel) - } + configCache.get(EntranceUtils.getUserCreatorEcTypeKey(userCreatorLabel, engineTypeLabel)) } override def isReady: Boolean = true diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala index 60164ca58b8..5cd29c275a5 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala @@ -17,28 +17,47 @@ package org.apache.linkis.entrance.parser -import org.apache.linkis.common.utils.Logging +import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.conf.EntranceConfiguration.{ + 
SPARK3_VERSION_COERCION_DEPARTMENT, + SPARK3_VERSION_COERCION_SWITCH, + SPARK3_VERSION_COERCION_USERS +} import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceIllegalParamException} import org.apache.linkis.entrance.persistence.PersistenceManager import org.apache.linkis.entrance.timeout.JobTimeoutManager +import org.apache.linkis.entrance.utils.EntranceUtils import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.protocol.conf.{DepartmentRequest, DepartmentResponse} +import org.apache.linkis.manager.common.conf.RMConfiguration import org.apache.linkis.manager.label.builder.factory.{ LabelBuilderFactory, LabelBuilderFactoryContext } +import org.apache.linkis.manager.label.conf.LabelCommonConfig import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.manager.label.entity.Label -import org.apache.linkis.manager.label.entity.engine.{CodeLanguageLabel, UserCreatorLabel} -import org.apache.linkis.manager.label.utils.EngineTypeLabelCreator +import org.apache.linkis.manager.label.entity.cluster.ClusterLabel +import org.apache.linkis.manager.label.entity.engine.{ + CodeLanguageLabel, + EngineType, + UserCreatorLabel +} +import org.apache.linkis.manager.label.utils.{EngineTypeLabelCreator, LabelUtil} import org.apache.linkis.protocol.constants.TaskConstant +import org.apache.linkis.protocol.utils.TaskUtils +import org.apache.linkis.rpc.Sender import org.apache.linkis.scheduler.queue.SchedulerEventState +import org.apache.linkis.storage.script.VariableParser import org.apache.commons.lang3.StringUtils import java.util import java.util.Date +import java.util.regex.Pattern import scala.collection.JavaConverters._ @@ -89,6 +108,7 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) s"${EntranceErrorCode.PARAM_CANNOT_EMPTY.getDesc}, labels is null" ) } + addUserToRuntime(submitUser, 
executeUser, configMap) // 3. set Code var code: String = null var runType: String = null @@ -112,12 +132,14 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) if (formatCode) code = format(code) jobRequest.setExecutionCode(code) // 4. parse label - val labels: util.Map[String, Label[_]] = buildLabel(labelMap) + var labels: util.HashMap[String, Label[_]] = buildLabel(labelMap) JobTimeoutManager.checkTimeoutLabel(labels) checkEngineTypeLabel(labels) generateAndVerifyCodeLanguageLabel(runType, labels) generateAndVerifyUserCreatorLabel(executeUser, labels) - + generateAndVerifyClusterLabel(labels) + // sparkVersion cover,only spark use + labels = sparkVersionCoercion(labels, executeUser, submitUser) jobRequest.setLabels(new util.ArrayList[Label[_]](labels.values())) jobRequest.setSource(source) jobRequest.setStatus(SchedulerEventState.Inited.toString) @@ -131,7 +153,8 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) private def checkEngineTypeLabel(labels: util.Map[String, Label[_]]): Unit = { val engineTypeLabel = labels.getOrDefault(LabelKeyConstant.ENGINE_TYPE_KEY, null) if (null == engineTypeLabel) { - val msg = s"You need to specify engineTypeLabel in labels, such as spark-2.4.3" + val msg = s"You need to specify engineTypeLabel in labels," + + s"such as spark-${LabelCommonConfig.SPARK_ENGINE_VERSION.getValue}" throw new EntranceIllegalParamException( EntranceErrorCode.LABEL_PARAMS_INVALID.getErrCode, EntranceErrorCode.LABEL_PARAMS_INVALID.getDesc + msg @@ -187,6 +210,22 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) } } + private def generateAndVerifyClusterLabel(labels: util.Map[String, Label[_]]): Unit = { + if (!Configuration.IS_MULTIPLE_YARN_CLUSTER) { + return + } + var clusterLabel = labels + .getOrDefault(LabelKeyConstant.YARN_CLUSTER_KEY, null) + .asInstanceOf[ClusterLabel] + if (clusterLabel == null) { + clusterLabel = + 
LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel(classOf[ClusterLabel]) + clusterLabel.setClusterName(RMConfiguration.DEFAULT_YARN_CLUSTER_NAME.getValue) + clusterLabel.setClusterType(RMConfiguration.DEFAULT_YARN_TYPE.getValue) + labels.put(clusterLabel.getLabelKey, clusterLabel) + } + } + private def parseToOldTask(params: util.Map[String, AnyRef]): JobRequest = { val jobReq = new JobRequest @@ -206,8 +245,10 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) jobReq.setExecuteUser(umUser) var executionCode = params.get(TaskConstant.EXECUTIONCODE).asInstanceOf[String] val _params = params.get(TaskConstant.PARAMS) + + addUserToRuntime(submitUser, umUser, _params) _params match { - case mapParams: java.util.Map[String, AnyRef] => jobReq.setParams(mapParams) + case mapParams: util.Map[String, AnyRef] => jobReq.setParams(mapParams) case _ => } val formatCode = params.get(TaskConstant.FORMATCODE).asInstanceOf[Boolean] @@ -242,38 +283,62 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) if (formatCode) executionCode = format(executionCode) jobReq.setExecutionCode(executionCode) } - val engineTypeLabel = EngineTypeLabelCreator.createEngineTypeLabel(executeApplicationName) + var engineTypeLabel = EngineTypeLabelCreator.createEngineTypeLabel(executeApplicationName) val runTypeLabel = labelBuilderFactory.createLabel[Label[_]](LabelKeyConstant.CODE_TYPE_KEY, runType) + val variableMap = + jobReq.getParams.get(VariableParser.VARIABLE).asInstanceOf[util.Map[String, String]] + if ( + null != variableMap && variableMap.containsKey(LabelCommonConfig.SPARK3_ENGINE_VERSION_CONF) + ) { + var version = variableMap.get(LabelCommonConfig.SPARK3_ENGINE_VERSION_CONF) + val pattern = Pattern.compile(EntranceUtils.sparkVersionRegex).matcher(version) + if (pattern.matches()) { + version = LabelCommonConfig.SPARK3_ENGINE_VERSION.getValue + } else { + version = LabelCommonConfig.SPARK_ENGINE_VERSION.getValue + } + engineTypeLabel = + 
EngineTypeLabelCreator.createEngineTypeLabel(EngineType.SPARK.toString, version) + } val userCreatorLabel = labelBuilderFactory .createLabel[Label[_]](LabelKeyConstant.USER_CREATOR_TYPE_KEY, umUser + "-" + creator) - val labelList = new util.ArrayList[Label[_]](3) - labelList.add(engineTypeLabel) - labelList.add(runTypeLabel) - labelList.add(userCreatorLabel) + var labels = new util.HashMap[String, Label[_]]() + labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, engineTypeLabel) + labels.put(LabelKeyConstant.CODE_TYPE_KEY, runTypeLabel) + labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, userCreatorLabel) if (jobReq.getParams != null) { val labelMap = params .getOrDefault(TaskConstant.LABELS, new util.HashMap[String, AnyRef]()) .asInstanceOf[util.Map[String, AnyRef]] - if (null != labelMap && !labelMap.isEmpty) { - val list: util.List[Label[_]] = - labelBuilderFactory.getLabels(labelMap) - labelList.addAll(list) - } + labels.putAll(buildLabel(labelMap)) } jobReq.setProgress("0.0") jobReq.setSource(source) // In order to be compatible with the code, let enginetype and runtype have the same attribute jobReq.setStatus(SchedulerEventState.Inited.toString) // Package labels - jobReq.setLabels(labelList) + // sparkVersion cover,only spark use + labels = sparkVersionCoercion(labels, umUser, submitUser) + jobReq.setLabels(new util.ArrayList[Label[_]](labels.values())) jobReq.setMetrics(new util.HashMap[String, AnyRef]()) jobReq.getMetrics.put(TaskConstant.JOB_SUBMIT_TIME, new Date(System.currentTimeMillis)) jobReq } - private def buildLabel(labelMap: util.Map[String, AnyRef]): util.Map[String, Label[_]] = { + private def addUserToRuntime(submitUser: String, umUser: String, _params: AnyRef): Unit = { + val runtimeMap: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef]() + runtimeMap.put(TaskConstant.SUBMIT_USER, submitUser) + runtimeMap.put(TaskConstant.EXECUTE_USER, umUser) + _params match { + case map: util.Map[String, AnyRef] => + TaskUtils.addRuntimeMap(map, 
runtimeMap) + case _ => + } + } + + private def buildLabel(labelMap: util.Map[String, AnyRef]): util.HashMap[String, Label[_]] = { val labelKeyValueMap = new util.HashMap[String, Label[_]]() if (null != labelMap && !labelMap.isEmpty) { val list: util.List[Label[_]] = @@ -287,6 +352,64 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager) labelKeyValueMap } + private def sparkVersionCoercion( + labels: util.HashMap[String, Label[_]], + executeUser: String, + submitUser: String + ): util.HashMap[String, Label[_]] = { + // 个人>部门 + // 是否强制转换 + if (SPARK3_VERSION_COERCION_SWITCH && (null != labels && !labels.isEmpty)) { + val engineTypeLabel = labels.get(LabelKeyConstant.ENGINE_TYPE_KEY) + val engineType = LabelUtil.getFromLabelStr(engineTypeLabel.getStringValue, "engine") + val version = LabelUtil.getFromLabelStr(engineTypeLabel.getStringValue, "version") + if ( + engineType.equals(EngineType.SPARK.toString) && (!version.equals( + LabelCommonConfig.SPARK3_ENGINE_VERSION.getValue + )) + ) { + Utils.tryAndWarnMsg { + // 判断用户是否是个人配置中的一员 + if ( + SPARK3_VERSION_COERCION_USERS.contains(executeUser) || SPARK3_VERSION_COERCION_USERS + .contains(submitUser) + ) { + logger.info( + s"Spark version will be change 3.4.4,submitUser:${submitUser},executeUser:${executeUser} " + ) + labels.replace( + LabelKeyConstant.ENGINE_TYPE_KEY, + EngineTypeLabelCreator.createEngineTypeLabel( + EngineType.SPARK.toString, + LabelCommonConfig.SPARK3_ENGINE_VERSION.getValue + ) + ) + return labels + } + val executeUserDepartmentId = EntranceUtils.getUserDepartmentId(executeUser) + val submitUserDepartmentId = EntranceUtils.getUserDepartmentId(submitUser) + if ( + (StringUtils.isNotBlank(executeUserDepartmentId) && SPARK3_VERSION_COERCION_DEPARTMENT + .contains(executeUserDepartmentId)) || + (StringUtils.isNotBlank(submitUserDepartmentId) && SPARK3_VERSION_COERCION_DEPARTMENT + .contains(submitUserDepartmentId)) + ) { + logger.info(s"Spark version will be change 3.4.4 by 
department:${executeUser} ") + labels.replace( + LabelKeyConstant.ENGINE_TYPE_KEY, + EngineTypeLabelCreator.createEngineTypeLabel( + EngineType.SPARK.toString, + LabelCommonConfig.SPARK3_ENGINE_VERSION.getValue + ) + ) + return labels + } + }(s"error to Spark 3 version coercion: ${executeUser}") + } + } + labels; + } + // todo to format code using proper way private def format(code: String): String = code diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala index 111350806e3..2ba98438e8f 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala @@ -17,20 +17,12 @@ package org.apache.linkis.entrance.persistence -import org.apache.linkis.common.io.{FsPath, MetaData, Record} -import org.apache.linkis.common.io.resultset.ResultSet -import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.common.utils.Logging import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} -import org.apache.linkis.entrance.execute.StorePathExecuteRequest -import org.apache.linkis.entrance.job.{EntranceExecuteRequest, EntranceExecutionJob} -import org.apache.linkis.entrance.scheduler.cache.CacheOutputExecuteResponse -import org.apache.linkis.governance.common.entity.job.SubJobDetail import org.apache.linkis.scheduler.executer.{AliasOutputExecuteResponse, OutputExecuteResponse} import org.apache.linkis.scheduler.queue.Job -import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriter} -import org.apache.linkis.storage.utils.FileSystemUtils +import 
org.apache.linkis.storage.resultset.ResultSetFactory -import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils class EntranceResultSetEngine extends ResultSetEngine with Logging { @@ -46,15 +38,11 @@ class EntranceResultSetEngine extends ResultSetEngine with Logging { EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc ) } - case CacheOutputExecuteResponse(alias, output) => - if (ResultSetFactory.getInstance.isResultSetPath(output)) { - getDir(output) - } else { - throw new EntranceErrorException( - EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getErrCode, - EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc - ) - } + case _ => + throw new EntranceErrorException( + EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getErrCode, + EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc + ) } } @@ -64,7 +52,7 @@ class EntranceResultSetEngine extends ResultSetEngine with Logging { } else { val arr = str.split("/").filter(StringUtils.isNotBlank) if (arr.length <= 2) { - return str + str } else { str.substring(0, str.lastIndexOf("/")) } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/PersistenceManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/PersistenceManager.scala index b60fa2d795b..b0cacb806b8 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/PersistenceManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/PersistenceManager.scala @@ -18,9 +18,13 @@ package org.apache.linkis.entrance.persistence import org.apache.linkis.entrance.EntranceContext -import org.apache.linkis.scheduler.listener.{JobListener, ProgressListener} +import org.apache.linkis.scheduler.listener.{JobListener, JobRetryListener, ProgressListener} -abstract class PersistenceManager extends JobListener with ResultSetListener with 
ProgressListener { +abstract class PersistenceManager + extends JobListener + with ResultSetListener + with ProgressListener + with JobRetryListener { def getEntranceContext: EntranceContext diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala index 7f16dd24630..cb1b610e2b5 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala @@ -17,42 +17,31 @@ package org.apache.linkis.entrance.scheduler -import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.conf.{CommonVars, Configuration} import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} import org.apache.linkis.entrance.execute.EntranceJob +import org.apache.linkis.entrance.utils.EntranceUtils import org.apache.linkis.governance.common.protocol.conf.{ RequestQueryEngineConfigWithGlobalConfig, ResponseQueryConfig } -import org.apache.linkis.instance.label.client.InstanceLabelClient -import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext -import org.apache.linkis.manager.label.constant.{LabelKeyConstant, LabelValueConstant} import org.apache.linkis.manager.label.entity.Label -import org.apache.linkis.manager.label.entity.engine.{ - ConcurrentEngineConnLabel, - EngineTypeLabel, - UserCreatorLabel -} -import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, 
UserCreatorLabel} import org.apache.linkis.manager.label.utils.LabelUtil -import org.apache.linkis.protocol.constants.TaskConstant -import org.apache.linkis.protocol.utils.TaskUtils import org.apache.linkis.rpc.Sender import org.apache.linkis.scheduler.queue.{Group, GroupFactory, SchedulerEvent} import org.apache.linkis.scheduler.queue.parallelqueue.ParallelGroup +import org.apache.commons.collections.MapUtils import org.apache.commons.lang3.StringUtils import java.util import java.util.concurrent.TimeUnit import java.util.regex.Pattern -import scala.collection.JavaConverters._ - import com.google.common.cache.{Cache, CacheBuilder} class EntranceGroupFactory extends GroupFactory with Logging { @@ -63,7 +52,7 @@ class EntranceGroupFactory extends GroupFactory with Logging { .maximumSize(EntranceConfiguration.GROUP_CACHE_MAX.getValue) .build() - private val GROUP_MAX_CAPACITY = CommonVars("wds.linkis.entrance.max.capacity", 2000) + private val GROUP_MAX_CAPACITY = CommonVars("wds.linkis.entrance.max.capacity", 1000) private val SPECIFIED_USERNAME_REGEX = CommonVars("wds.linkis.entrance.specified.username.regex", "hduser.*") @@ -81,29 +70,19 @@ class EntranceGroupFactory extends GroupFactory with Logging { } override def getOrCreateGroup(event: SchedulerEvent): Group = { - val (labels, params) = event match { + val labels = event match { case job: EntranceJob => - (job.getJobRequest.getLabels, job.getJobRequest.getParams) + job.getJobRequest.getLabels + case _ => + throw new EntranceErrorException(LABEL_NOT_NULL.getErrorCode, LABEL_NOT_NULL.getErrorDesc) } - val groupName = EntranceGroupFactory.getGroupNameByLabels(labels, params) + val groupName = EntranceGroupFactory.getGroupNameByLabels(labels) val cacheGroup = groupNameToGroups.getIfPresent(groupName) if (null == cacheGroup) synchronized { - val maxAskExecutorTimes = EntranceConfiguration.MAX_ASK_EXECUTOR_TIME.getValue.toLong - if (groupName.startsWith(EntranceGroupFactory.CONCURRENT)) { - if (null == 
groupNameToGroups.getIfPresent(groupName)) synchronized { - if (null == groupNameToGroups.getIfPresent(groupName)) { - val group = new ParallelGroup( - groupName, - 100, - EntranceConfiguration.CONCURRENT_FACTORY_MAX_CAPACITY.getValue - ) - group.setMaxRunningJobs(EntranceConfiguration.CONCURRENT_MAX_RUNNING_JOBS.getValue) - group.setMaxAskExecutorTimes(EntranceConfiguration.CONCURRENT_EXECUTOR_TIME.getValue) - groupNameToGroups.put(groupName, group) - return group - } - } + if (groupNameToGroups.getIfPresent(groupName) != null) { + return groupNameToGroups.getIfPresent(groupName) } + val maxAskExecutorTimes = EntranceConfiguration.MAX_ASK_EXECUTOR_TIME.getValue.toLong val sender: Sender = Sender.getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) val userCreatorLabel: UserCreatorLabel = LabelUtil.getUserCreatorLabel(labels) @@ -119,7 +98,7 @@ class EntranceGroupFactory extends GroupFactory with Logging { }( "Get user configurations from configuration server failed! Next use the default value to continue." 
) - val maxRunningJobs = getUserMaxRunningJobs(keyAndValue) + val maxRunningJobs = EntranceGroupFactory.getUserMaxRunningJobs(keyAndValue) val initCapacity = GROUP_INIT_CAPACITY.getValue(keyAndValue) val maxCapacity = if (null != specifiedUsernameRegexPattern) { if (specifiedUsernameRegexPattern.matcher(userCreatorLabel.getUser).find()) { @@ -141,8 +120,11 @@ class EntranceGroupFactory extends GroupFactory with Logging { group.setMaxRunningJobs(maxRunningJobs) group.setMaxAskExecutorTimes(maxAskExecutorTimes) groupNameToGroups.put(groupName, group) + group + } + else { + cacheGroup } - groupNameToGroups.getIfPresent(groupName) } override def getGroup(groupName: String): Group = { @@ -156,105 +138,49 @@ class EntranceGroupFactory extends GroupFactory with Logging { group } - private def getUserMaxRunningJobs(keyAndValue: util.Map[String, String]): Int = { - var userDefinedRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue) - var entranceNum = Sender.getInstances(Sender.getThisServiceInstance.getApplicationName).length - val labelList = new util.ArrayList[Label[_]]() - val offlineRouteLabel = LabelBuilderFactoryContext.getLabelBuilderFactory - .createLabel[RouteLabel](LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE) - labelList.add(offlineRouteLabel) - var offlineIns: Array[ServiceInstance] = null - Utils.tryAndWarn { - offlineIns = InstanceLabelClient.getInstance - .getInstanceFromLabel(labelList) - .asScala - .filter(l => - null != l && l.getApplicationName - .equalsIgnoreCase(Sender.getThisServiceInstance.getApplicationName) - ) - .toArray - } - if (null != offlineIns) { - logger.info(s"There are ${offlineIns.length} offlining instance.") - entranceNum = entranceNum - offlineIns.length - } - /* - Sender.getInstances may get 0 instances due to cache in Sender. So this instance is the one instance. 
- */ - if (0 >= entranceNum) { - logger.error( - s"Got ${entranceNum} ${Sender.getThisServiceInstance.getApplicationName} instances." - ) - entranceNum = 1 - } - Math.max( - EntranceConfiguration.ENTRANCE_INSTANCE_MIN.getValue, - userDefinedRunningJobs / entranceNum - ); - } - } object EntranceGroupFactory { - val CACHE = "_Cache" - - val CONCURRENT = "Concurrent_" - - def getGroupName( - creator: String, - user: String, - params: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] - ): String = { - val runtime = TaskUtils.getRuntimeMap(params) - val cache = - if ( - runtime.get(TaskConstant.READ_FROM_CACHE) != null && runtime - .get(TaskConstant.READ_FROM_CACHE) - .asInstanceOf[Boolean] - ) { - CACHE - } else "" - if (StringUtils.isNotEmpty(creator)) creator + "_" + user + cache - else EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue + "_" + user + cache - } - - def getGroupNameByLabels( - labels: java.util.List[Label[_]], - params: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] - ): String = { - - val userCreator = labels.asScala.find(_.isInstanceOf[UserCreatorLabel]) - val engineType = labels.asScala.find(_.isInstanceOf[EngineTypeLabel]) - val concurrent = labels.asScala.find(_.isInstanceOf[ConcurrentEngineConnLabel]) - if (userCreator.isEmpty || engineType.isEmpty) { + /** + * Entrance group rule creator_username_engineType eg:IDE_PEACEWONG_SPARK + * @param labels + * @param params + * @return + */ + def getGroupNameByLabels(labels: java.util.List[Label[_]]): String = { + val userCreatorLabel = LabelUtil.getUserCreatorLabel(labels) + val engineTypeLabel = LabelUtil.getEngineTypeLabel(labels) + if (null == userCreatorLabel || null == engineTypeLabel) { throw new EntranceErrorException(LABEL_NOT_NULL.getErrorCode, LABEL_NOT_NULL.getErrorDesc) } + val groupName = + userCreatorLabel.getCreator + "_" + userCreatorLabel.getUser + "_" + engineTypeLabel.getEngineType + groupName + } - if (concurrent.isDefined) { - - val 
engineTypeLabel = engineType.get.asInstanceOf[EngineTypeLabel] - val groupName = CONCURRENT + engineTypeLabel.getEngineType - groupName - - } else { - val userCreatorLabel = userCreator.get.asInstanceOf[UserCreatorLabel] - - val engineTypeLabel = engineType.get.asInstanceOf[EngineTypeLabel] - - val runtime = TaskUtils.getRuntimeMap(params) - val cache = - if ( - runtime.get(TaskConstant.READ_FROM_CACHE) != null && runtime - .get(TaskConstant.READ_FROM_CACHE) - .asInstanceOf[Boolean] - ) { - CACHE - } else "" - val groupName = - userCreatorLabel.getCreator + "_" + userCreatorLabel.getUser + "_" + engineTypeLabel.getEngineType + cache - groupName - } + /** + * User task concurrency control is controlled for multiple Entrances, which will be evenly + * distributed based on the number of existing Entrances + * @param keyAndValue + * @return + */ + def getUserMaxRunningJobs(keyAndValue: util.Map[String, String]): Int = { + val userDefinedRunningJobs = + if ( + MapUtils.isNotEmpty(keyAndValue) && keyAndValue.containsKey( + EntranceConfiguration.WDS_LINKIS_ENTRANCE_RUNNING_JOB.key + ) + ) { + EntranceConfiguration.WDS_LINKIS_ENTRANCE_RUNNING_JOB.getValue(keyAndValue) + } else { + EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue) + } + val entranceNum = EntranceUtils.getRunningEntranceNumber() + Math.max( + EntranceConfiguration.ENTRANCE_INSTANCE_MIN.getValue, + userDefinedRunningJobs / entranceNum + ) } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala index d5de2cc2da6..1638b0fb1ce 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala +++ 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala @@ -28,6 +28,11 @@ class EntranceSchedulerContext extends SchedulerContext { private var consumerManager: ConsumerManager = _ private var executorManager: ExecutorManager = _ + private var offlineFlag: Boolean = false + + def setOfflineFlag(offlineFlag: Boolean): Unit = this.offlineFlag = offlineFlag + def getOfflineFlag: Boolean = this.offlineFlag + def this( groupFactory: GroupFactory, consumerManager: ConsumerManager, diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala deleted file mode 100644 index 47a6ce9e9ee..00000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.scheduler.executer.OutputExecuteResponse - -case class CacheOutputExecuteResponse(alias: String, output: String) extends OutputExecuteResponse { - override def getOutput: String = output -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala deleted file mode 100644 index 65bbbd39b4f..00000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.common.io.FsPath -import org.apache.linkis.common.utils.Utils -import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ -import org.apache.linkis.entrance.exception.CacheNotReadyException -import org.apache.linkis.entrance.execute.EntranceJob -import org.apache.linkis.entrance.persistence.PersistenceManager -import org.apache.linkis.entrance.utils.JobHistoryHelper -import org.apache.linkis.governance.common.entity.job.JobRequest -import org.apache.linkis.manager.label.constant.LabelKeyConstant -import org.apache.linkis.protocol.constants.TaskConstant -import org.apache.linkis.protocol.utils.TaskUtils -import org.apache.linkis.scheduler.SchedulerContext -import org.apache.linkis.scheduler.errorcode.LinkisSchedulerErrorCodeSummary._ -import org.apache.linkis.scheduler.exception.SchedulerErrorException -import org.apache.linkis.scheduler.executer.SuccessExecuteResponse -import org.apache.linkis.scheduler.queue.Group -import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer -import org.apache.linkis.server.BDPJettyServerHelper -import org.apache.linkis.storage.FSFactory -import org.apache.linkis.storage.fs.FileSystem - -import org.apache.commons.io.FilenameUtils -import org.apache.commons.lang3.StringUtils - -import java.util.concurrent.ExecutorService - -import scala.collection.JavaConverters._ - -import com.google.common.collect.Lists - -class ReadCacheConsumer( - schedulerContext: SchedulerContext, - executeService: ExecutorService, - private var group: Group, - persistenceManager: PersistenceManager -) extends FIFOUserConsumer(schedulerContext, executeService, group) { - - override protected def loop(): Unit = { - val event = Option(getConsumeQueue.take()) - event.foreach { - case job: EntranceJob => - job.getJobRequest match { - case jobRequest: JobRequest => - Utils.tryCatch { - val engineTpyeLabel = jobRequest.getLabels.asScala - .filter(l => 
l.getLabelKey.equalsIgnoreCase(LabelKeyConstant.ENGINE_TYPE_KEY)) - .headOption - .getOrElse(null) - val labelStrList = jobRequest.getLabels.asScala.map { case l => - l.getStringValue - }.toList - if (null == engineTpyeLabel) { - logger.error( - "Invalid engineType null, cannot process. jobReq : " + BDPJettyServerHelper.gson - .toJson(jobRequest) - ) - throw CacheNotReadyException( - INVALID_ENGINETYPE_NULL.getErrorCode, - INVALID_ENGINETYPE_NULL.getErrorDesc - ) - } - val readCacheBefore: Long = TaskUtils - .getRuntimeMap(job.getParams) - .getOrDefault(TaskConstant.READ_CACHE_BEFORE, 300L: java.lang.Long) - .asInstanceOf[Long] - val cacheResult = JobHistoryHelper.getCache( - jobRequest.getExecutionCode, - jobRequest.getExecuteUser, - labelStrList.asJava, - readCacheBefore - ) - if (cacheResult != null && StringUtils.isNotBlank(cacheResult.getResultLocation)) { - val resultSets = listResults(cacheResult.getResultLocation, job.getUser) - if (resultSets.size() > 0) { - for (resultSet: FsPath <- resultSets.asScala) { - val alias = FilenameUtils.getBaseName(resultSet.getPath) - val output = FsPath - .getFsPath( - cacheResult.getResultLocation, - FilenameUtils.getName(resultSet.getPath) - ) - .getSchemaPath -// persistenceManager.onResultSetCreated(job, new CacheOutputExecuteResponse(alias, output)) - throw CacheNotReadyException( - INVALID_RESULTSETS.getErrorCode, - INVALID_RESULTSETS.getErrorDesc - ) - // todo check - } -// persistenceManager.onResultSizeCreated(job, resultSets.size()) - } - val runtime = TaskUtils.getRuntimeMap(job.getParams) - runtime.put(TaskConstant.CACHE, java.lang.Boolean.FALSE) - TaskUtils.addRuntimeMap(job.getParams, runtime) - job.transitionCompleted(SuccessExecuteResponse(), "Result found in cache") - } else { - logger.info("Cache not found, submit to normal consumer.") - submitToExecute(job) - } - } { t => - logger.warn("Read cache failed, submit to normal consumer: ", t) - submitToExecute(job) - } - case _ => - } - case _ => - } - } - - 
private def listResults(resultLocation: String, user: String) = { - val dirPath = FsPath.getFsPath(resultLocation) - val fileSystem = FSFactory.getFsByProxyUser(dirPath, user).asInstanceOf[FileSystem] - Utils.tryFinally { - fileSystem.init(null) - if (fileSystem.exists(dirPath)) { - fileSystem.listPathWithError(dirPath).getFsPaths - } else { - Lists.newArrayList[FsPath]() - } - }(Utils.tryQuietly(fileSystem.close())) - } - - private def submitToExecute(job: EntranceJob): Unit = { - val runtime = TaskUtils.getRuntimeMap(job.getParams) - runtime.put(TaskConstant.READ_FROM_CACHE, java.lang.Boolean.FALSE) - TaskUtils.addRuntimeMap(job.getParams, runtime) - val groupName = schedulerContext.getOrCreateGroupFactory.getOrCreateGroup(job).getGroupName - val consumer = schedulerContext.getOrCreateConsumerManager.getOrCreateConsumer(groupName) - val index = consumer.getConsumeQueue.offer(job) - // index.map(getEventId(_, groupName)).foreach(job.setId) - if (index.isEmpty) { - throw new SchedulerErrorException( - JOB_QUEUE_IS_FULL.getErrorCode, - JOB_QUEUE_IS_FULL.getErrorDesc - ) - } - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala deleted file mode 100644 index a4cba19f34e..00000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.entrance.persistence.PersistenceManager -import org.apache.linkis.entrance.scheduler.EntranceGroupFactory -import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer -import org.apache.linkis.scheduler.queue.parallelqueue.ParallelConsumerManager - -class ReadCacheConsumerManager(maxParallelismUsers: Int, persistenceManager: PersistenceManager) - extends ParallelConsumerManager(maxParallelismUsers) { - - override protected def createConsumer(groupName: String): FIFOUserConsumer = { - val group = getSchedulerContext.getOrCreateGroupFactory.getGroup(groupName) - if (groupName.endsWith(EntranceGroupFactory.CACHE)) { - logger.info("Create cache consumer with group: " + groupName) - new ReadCacheConsumer( - getSchedulerContext, - getOrCreateExecutorService, - group, - persistenceManager - ) - } else { - logger.info("Create normal consumer with group: " + groupName) - new FIFOUserConsumer(getSchedulerContext, getOrCreateExecutorService, group) - } - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala index aaaf131bd86..4e624303169 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala +++ 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala @@ -38,8 +38,8 @@ class JobTimeoutManager extends Logging { private[this] final val timeoutJobByName: ConcurrentMap[String, EntranceJob] = new ConcurrentHashMap[String, EntranceJob] - val timeoutCheck: Boolean = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue - val timeoutScanInterval: Int = EntranceConfiguration.TIMEOUT_SCAN_INTERVAL.getValue + private val timeoutCheck: Boolean = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue + private val timeoutScanInterval: Int = EntranceConfiguration.TIMEOUT_SCAN_INTERVAL.getValue def add(jobKey: String, job: EntranceJob): Unit = { logger.info(s"Adding timeout job: ${job.getId()}") @@ -77,75 +77,75 @@ class JobTimeoutManager extends Logging { } private def timeoutDetective(): Unit = { - if (timeoutCheck) { - def checkAndSwitch(job: EntranceJob): Unit = { - logger.info(s"Checking whether the job id ${job.getJobRequest.getId()} timed out. ") - val currentTimeSeconds = System.currentTimeMillis() / 1000 - // job.isWaiting == job in queue - val jobScheduleStartTimeSeconds = - if (job.isWaiting) job.createTime / 1000 else currentTimeSeconds - val queuingTimeSeconds = currentTimeSeconds - jobScheduleStartTimeSeconds - val jobRunningStartTimeSeconds = - if (job.getStartTime > 0) job.getStartTime / 1000 else currentTimeSeconds - val runningTimeSeconds = currentTimeSeconds - jobRunningStartTimeSeconds - if (!job.isCompleted) { - job.jobRequest.getLabels.asScala foreach { - case queueTimeOutLabel: JobQueuingTimeoutLabel => - if ( - job.isWaiting && queueTimeOutLabel.getQueuingTimeout > 0 && queuingTimeSeconds >= queueTimeOutLabel.getQueuingTimeout - ) { - logger.warn( - s"Job ${job.getJobRequest.getId()} queued time : ${queuingTimeSeconds} seconds, which was over queueTimeOut : ${queueTimeOutLabel.getQueuingTimeout} seconds, cancel it now! 
" - ) - job.onFailure( - s"Job queued ${queuingTimeSeconds} seconds over max queue time : ${queueTimeOutLabel.getQueuingTimeout} seconds.", - null - ) - } - case jobRunningTimeoutLabel: JobRunningTimeoutLabel => - if ( - job.isRunning && jobRunningTimeoutLabel.getRunningTimeout > 0 && runningTimeSeconds >= jobRunningTimeoutLabel.getRunningTimeout - ) { - logger.warn( - s"Job ${job.getJobRequest.getId()} run timeout ${runningTimeSeconds} seconds, which was over runTimeOut : ${jobRunningTimeoutLabel.getRunningTimeout} seconds, cancel it now! " - ) - job.onFailure( - s"Job run ${runningTimeSeconds} seconds over max run time : ${jobRunningTimeoutLabel.getRunningTimeout} seconds.", - null - ) - } - case _ => - } + def checkAndSwitch(job: EntranceJob): Unit = { + logger.info(s"Checking whether the job id ${job.getJobRequest.getId()} timed out. ") + val currentTimeSeconds = System.currentTimeMillis() / 1000 + // job.isWaiting == job in queue + val jobScheduleStartTimeSeconds = + if (job.isWaiting) job.createTime / 1000 else currentTimeSeconds + val queuingTimeSeconds = currentTimeSeconds - jobScheduleStartTimeSeconds + val jobRunningStartTimeSeconds = + if (job.getStartTime > 0) job.getStartTime / 1000 else currentTimeSeconds + val runningTimeSeconds = currentTimeSeconds - jobRunningStartTimeSeconds + if (!job.isCompleted) { + job.jobRequest.getLabels.asScala foreach { + case queueTimeOutLabel: JobQueuingTimeoutLabel => + if ( + job.isWaiting && queueTimeOutLabel.getQueuingTimeout > 0 && queuingTimeSeconds >= queueTimeOutLabel.getQueuingTimeout + ) { + logger.warn( + s"Job ${job.getJobRequest.getId()} queued time : ${queuingTimeSeconds} seconds, which was over queueTimeOut : ${queueTimeOutLabel.getQueuingTimeout} seconds, cancel it now! 
" + ) + job.onFailure( + s"Job queued ${queuingTimeSeconds} seconds over max queue time : ${queueTimeOutLabel.getQueuingTimeout} seconds.", + null + ) + } + case jobRunningTimeoutLabel: JobRunningTimeoutLabel => + if ( + job.isRunning && jobRunningTimeoutLabel.getRunningTimeout > 0 && runningTimeSeconds >= jobRunningTimeoutLabel.getRunningTimeout + ) { + logger.warn( + s"Job ${job.getJobRequest.getId()} run timeout ${runningTimeSeconds} seconds, which was over runTimeOut : ${jobRunningTimeoutLabel.getRunningTimeout} seconds, cancel it now! " + ) + job.onFailure( + s"Job run ${runningTimeSeconds} seconds over max run time : ${jobRunningTimeoutLabel.getRunningTimeout} seconds.", + null + ) + } + case _ => } } - - timeoutJobByName.asScala.foreach(item => { - logger.info(s"Running timeout detection!") - synchronized { - jobCompleteDelete(item._1) - if (jobExist(item._1)) checkAndSwitch(item._2) - } - }) } + + timeoutJobByName.asScala.foreach(item => { + logger.info(s"Running timeout detection!") + synchronized { + jobCompleteDelete(item._1) + if (jobExist(item._1)) checkAndSwitch(item._2) + } + }) } // Thread periodic scan timeout task - val woker = Utils.defaultScheduler.scheduleAtFixedRate( - new Runnable() { - - override def run(): Unit = { - Utils.tryCatch { - timeoutDetective() - } { case t: Throwable => - logger.error(s"TimeoutDetective task failed. ${t.getMessage}", t) + if (timeoutCheck) { + val woker = Utils.defaultScheduler.scheduleAtFixedRate( + new Runnable() { + + override def run(): Unit = { + Utils.tryCatch { + timeoutDetective() + } { case t: Throwable => + logger.warn(s"TimeoutDetective task failed. 
${t.getMessage}", t) + } } - } - }, - 0, - timeoutScanInterval, - TimeUnit.SECONDS - ) + }, + 0, + timeoutScanInterval, + TimeUnit.SECONDS + ) + } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/CommonLogPathUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/CommonLogPathUtils.scala index 1311374fc1e..e8ba06ef094 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/CommonLogPathUtils.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/CommonLogPathUtils.scala @@ -19,13 +19,17 @@ package org.apache.linkis.entrance.utils import org.apache.linkis.common.io.FsPath import org.apache.linkis.common.utils.Utils +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.utils.GovernanceUtils +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.fs.FileSystem import org.apache.linkis.storage.utils.{FileSystemUtils, StorageConfiguration, StorageUtils} object CommonLogPathUtils { - def buildCommonPath(commonPath: String): Unit = { + def buildCommonPath(commonPath: String, isResPath: Boolean): Unit = { val fileSystem = getRootFs(commonPath) fileSystem.init(null) val realPath: String = if (commonPath.endsWith("/")) { @@ -38,6 +42,16 @@ object CommonLogPathUtils { FileSystemUtils.mkdirs(fileSystem, fsPath, StorageUtils.getJvmUser) fileSystem.setPermission(fsPath, "770") } + // create defalut creator path + if (isResPath) { + val defaultPath = + GovernanceUtils.getResultParentPath(GovernanceUtils.LINKIS_DEFAULT_RES_CREATOR) + val resPath = new FsPath(defaultPath) + if (!fileSystem.exists(resPath)) { + FileSystemUtils.mkdirs(fileSystem, resPath, StorageUtils.getJvmUser) + 
fileSystem.setPermission(resPath, "770") + } + } Utils.tryQuietly(fileSystem.close()) } @@ -52,4 +66,21 @@ object CommonLogPathUtils { } } + def getResultParentPath(jobRequest: JobRequest): String = { + val userCreator = LabelUtil.getUserCreatorLabel(jobRequest.getLabels) + val creator = + if (null == userCreator) EntranceConfiguration.DEFAULT_CREATE_SERVICE.getValue + else userCreator.getCreator + GovernanceUtils.getResultParentPath(creator) + } + + def getResultPath(jobRequest: JobRequest): String = { + val userCreator = LabelUtil.getUserCreatorLabel(jobRequest.getLabels) + val creator = + if (null == userCreator) EntranceConfiguration.DEFAULT_CREATE_SERVICE.getValue + else userCreator.getCreator + val parentPath = GovernanceUtils.getResultParentPath(creator) + parentPath + "/" + jobRequest.getExecuteUser + "/" + jobRequest.getId + } + } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala new file mode 100644 index 00000000000..f397aeb2b21 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala @@ -0,0 +1,446 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.utils + +import org.apache.linkis.common.ServiceInstance +import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.log.LogUtils +import org.apache.linkis.common.utils.{Logging, SHAUtils, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary +import org.apache.linkis.entrance.exception.EntranceRPCException +import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.protocol.conf.{DepartmentRequest, DepartmentResponse} +import org.apache.linkis.instance.label.client.InstanceLabelClient +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.conf.LabelCommonConfig +import org.apache.linkis.manager.label.constant.{LabelKeyConstant, LabelValueConstant} +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.{EngineType, EngineTypeLabel, UserCreatorLabel} +import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.manager.label.utils.{EngineTypeLabelCreator, LabelUtil} +import org.apache.linkis.protocol.utils.TaskUtils +import org.apache.linkis.rpc.Sender +import org.apache.linkis.server.BDPJettyServerHelper + +import org.apache.commons.collections.MapUtils +import org.apache.commons.lang3.StringUtils +import org.apache.http.client.config.RequestConfig +import org.apache.http.client.methods.{CloseableHttpResponse, HttpPost} +import org.apache.http.entity.{ContentType, StringEntity} +import org.apache.http.impl.client.{BasicCookieStore, CloseableHttpClient, HttpClients} +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager +import org.apache.http.util.EntityUtils + +import java.{lang, util} +import 
java.nio.charset.StandardCharsets +import java.util.{HashMap, Map} + +import scala.collection.JavaConverters.asScalaBufferConverter + +object EntranceUtils extends Logging { + + private val SPLIT = "," + + private val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory + + val sparkVersionRegex = "^3(\\.\\d+)*$" + + protected val connectionManager = new PoolingHttpClientConnectionManager + protected val cookieStore = new BasicCookieStore + + private val httpClient: CloseableHttpClient = HttpClients + .custom() + .setDefaultCookieStore(cookieStore) + .setMaxConnTotal(EntranceConfiguration.DOCTOR_HTTP_MAX_CONNECT) + .setMaxConnPerRoute(EntranceConfiguration.DOCTOR_HTTP_MAX_CONNECT / 2) + .setConnectionManager(connectionManager) + .build() + + def getUserCreatorEcTypeKey( + userCreatorLabel: UserCreatorLabel, + engineTypeLabel: EngineTypeLabel + ): String = { + userCreatorLabel.getStringValue + SPLIT + engineTypeLabel.getStringValue + } + + def fromKeyGetLabels(key: String): (UserCreatorLabel, EngineTypeLabel) = { + if (StringUtils.isBlank(key)) (null, null) + else { + val labelStringValues = key.split(SPLIT) + if (labelStringValues.length < 2) return (null, null) + val userCreatorLabel = labelFactory + .createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY, labelStringValues(0)) + val engineTypeLabel = labelFactory + .createLabel[EngineTypeLabel](LabelKeyConstant.ENGINE_TYPE_KEY, labelStringValues(1)) + (userCreatorLabel, engineTypeLabel) + } + } + + def getDefaultCreatorECTypeKey(creator: String, ecType: String): String = { + val userCreatorLabel = + labelFactory.createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY) + val ecTypeLabel = EngineTypeLabelCreator.createEngineTypeLabel(ecType) + userCreatorLabel.setUser("*") + userCreatorLabel.setCreator(creator) + getUserCreatorEcTypeKey(userCreatorLabel, ecTypeLabel) + } + + def getRunningEntranceNumber(): Int = { + val entranceNum = 
Sender.getInstances(Sender.getThisServiceInstance.getApplicationName).length + val labelList = new util.ArrayList[Label[_]]() + val offlineRouteLabel = LabelBuilderFactoryContext.getLabelBuilderFactory + .createLabel[RouteLabel](LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE) + labelList.add(offlineRouteLabel) + var offlineIns: Array[ServiceInstance] = null + Utils.tryAndWarn { + offlineIns = InstanceLabelClient.getInstance + .getInstanceFromLabel(labelList) + .asScala + .filter(l => + null != l && l.getApplicationName + .equalsIgnoreCase(Sender.getThisServiceInstance.getApplicationName) + ) + .toArray + } + val entranceRealNumber = if (null != offlineIns) { + logger.info(s"There are ${offlineIns.length} offlining instance.") + entranceNum - offlineIns.length + } else { + entranceNum + } + /* + Sender.getInstances may get 0 instances due to cache in Sender. So this instance is the one instance. + */ + if (entranceRealNumber <= 0) { + logger.error( + s"Got ${entranceRealNumber} ${Sender.getThisServiceInstance.getApplicationName} instances." 
+ ) + 1 + } else { + entranceRealNumber + } + } + + /** + * 获取用户部门ID + */ + def getUserDepartmentId(username: String): String = { + var departmentId = "" + val sender: Sender = + Sender.getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) + val responseSubmitUser = sender.ask(new DepartmentRequest(username)) + responseSubmitUser match { + case departmentSubmitUser: DepartmentResponse => + if (StringUtils.isNotBlank(departmentSubmitUser.departmentId)) { + departmentId = departmentSubmitUser.departmentId + } + case _ => + } + departmentId + } + + /** + * 动态引擎类型选择 + */ + def getDynamicEngineType(sql: String, logAppender: java.lang.StringBuilder): String = { + val defaultEngineType = "spark" + + if (!EntranceConfiguration.AI_SQL_DYNAMIC_ENGINE_SWITCH) { + return defaultEngineType + } + + logger.info(s"AISQL automatically switches engines and begins to call Doctoris") + + val params = new util.HashMap[String, AnyRef]() + params.put("sql", sql) + params.put("highStability", "") + params.put("queueResourceUsage", "") + + val request = DoctorRequest( + apiUrl = EntranceConfiguration.DOCTOR_DYNAMIC_ENGINE_URL, + params = params, + defaultValue = defaultEngineType, + successMessage = "Aisql intelligent selection engines, Suggest", + exceptionMessage = "Aisql intelligent selection component exception" + ) + + val response = callDoctorService(request, logAppender) + response.result + } + + def dealsparkDynamicConf( + jobRequest: JobRequest, + logAppender: lang.StringBuilder, + params: util.Map[String, AnyRef] + ): Unit = { + // deal with spark3 dynamic allocation conf + // 1.只有spark3需要处理动态规划参数 2.用户未指定模板名称,则设置默认值与spark底层配置保持一致,否则使用用户模板中指定的参数 + val properties = new util.HashMap[String, AnyRef]() + val label: EngineTypeLabel = LabelUtil.getEngineTypeLabel(jobRequest.getLabels) + val sparkDynamicAllocationEnabled: Boolean = + EntranceConfiguration.SPARK_DYNAMIC_ALLOCATION_ENABLED + if ( + sparkDynamicAllocationEnabled && label.getEngineType.equals( 
+ EngineType.SPARK.toString + ) && label.getVersion.contains(LabelCommonConfig.SPARK3_ENGINE_VERSION.getValue) + ) { + properties.put( + EntranceConfiguration.SPARK_EXECUTOR_CORES.key, + EntranceConfiguration.SPARK_EXECUTOR_CORES.getValue + ) + properties.put( + EntranceConfiguration.SPARK_EXECUTOR_MEMORY.key, + EntranceConfiguration.SPARK_EXECUTOR_MEMORY.getValue + ) + properties.put( + EntranceConfiguration.SPARK_DYNAMIC_ALLOCATION_MAX_EXECUTORS.key, + EntranceConfiguration.SPARK_DYNAMIC_ALLOCATION_MAX_EXECUTORS.getValue + ) + properties.put( + EntranceConfiguration.SPARK_EXECUTOR_INSTANCES.key, + EntranceConfiguration.SPARK_EXECUTOR_INSTANCES.getValue + ) + properties.put( + EntranceConfiguration.SPARK_EXECUTOR_MEMORY_OVERHEAD.key, + EntranceConfiguration.SPARK_EXECUTOR_MEMORY_OVERHEAD.getValue + ) + properties.put( + EntranceConfiguration.SPARK3_PYTHON_VERSION.key, + EntranceConfiguration.SPARK3_PYTHON_VERSION.getValue + ) + Utils.tryAndWarn { + val extraConfs: String = + EntranceConfiguration.SPARK_DYNAMIC_ALLOCATION_ADDITIONAL_CONFS + if (StringUtils.isNotBlank(extraConfs)) { + val confs: Array[String] = extraConfs.split(",") + for (conf <- confs) { + val confKey: String = conf.split("=")(0) + val confValue: String = conf.split("=")(1) + properties.put(confKey, confValue) + } + } + } + logAppender.append( + LogUtils + .generateInfo(s"use spark3 default conf. 
\n") + ) + TaskUtils.addStartupMap(params, properties) + } + } + + /** + * 敏感信息SQL检查 + */ + def sensitiveSqlCheck( + code: String, + codeType: String, + engine: String, + user: String, + logAppender: java.lang.StringBuilder + ): (Boolean, String) = { + val params = new util.HashMap[String, AnyRef]() + params.put("code", code) + params.put("user", user) + params.put("engine", engine) + params.put("codeType", codeType) + + val request = DoctorRequest( + apiUrl = EntranceConfiguration.DOCTOR_ENCRYPT_SQL_URL, + params = params, + defaultValue = "false", + successMessage = "Sensitive SQL Check result", + exceptionMessage = "Sensitive SQL Check exception" + ) + + val response = callDoctorService(request, logAppender) + (response.result.toBoolean, response.reason) + } + + /** + * Doctor服务调用通用框架 + */ + case class DoctorRequest( + apiUrl: String, + params: util.Map[String, AnyRef], + defaultValue: String, + successMessage: String, + exceptionMessage: String + ) + + case class DoctorResponse( + success: Boolean, + result: String, + reason: String = "", + duration: Double = 0.0 + ) + + /** + * 通用Doctor服务调用方法 + */ + private def callDoctorService( + request: DoctorRequest, + logAppender: java.lang.StringBuilder + ): DoctorResponse = { + // 检查必要的配置参数 + if (!isValidDoctorConfiguration()) { + logInfo(s"${request.exceptionMessage}, using default: ${request.defaultValue}", logAppender) + return DoctorResponse(success = false, result = request.defaultValue) + } + + try { + val startTime = System.currentTimeMillis() + val url = buildDoctorRequestUrl(request.apiUrl) + val response = executeDoctorHttpRequest(url, request.params) + + if (StringUtils.isBlank(response)) { + return DoctorResponse(success = false, result = request.defaultValue) + } + + parseDoctorResponse(response, startTime, request, logAppender) + } catch { + case e: Exception => + logger.warn(s"${request.exceptionMessage}: params: ${request.params}", e) + logInfo(s"${request.exceptionMessage}, using default: 
${request.defaultValue}", logAppender) + DoctorResponse(success = false, result = request.defaultValue) + } + } + + /** + * 检查Doctor配置参数是否有效 + */ + private def isValidDoctorConfiguration(): Boolean = { + StringUtils.isNotBlank(EntranceConfiguration.LINKIS_SYSTEM_NAME) && + StringUtils.isNotBlank(EntranceConfiguration.DOCTOR_SIGNATURE_TOKEN) && + StringUtils.isNotBlank(EntranceConfiguration.DOCTOR_CLUSTER) && + StringUtils.isNotBlank(EntranceConfiguration.DOCTOR_URL) + } + + /** + * 构建Doctor请求URL + */ + private def buildDoctorRequestUrl(apiUrl: String): String = { + val timestampStr = String.valueOf(System.currentTimeMillis) + val signature = SHAUtils.Encrypt( + SHAUtils.Encrypt( + EntranceConfiguration.LINKIS_SYSTEM_NAME + EntranceConfiguration.DOCTOR_NONCE + timestampStr, + null + ) + EntranceConfiguration.DOCTOR_SIGNATURE_TOKEN, + null + ) + + (EntranceConfiguration.DOCTOR_URL + apiUrl) + .replace("$app_id", EntranceConfiguration.LINKIS_SYSTEM_NAME) + .replace("$timestamp", timestampStr) + .replace("$nonce", EntranceConfiguration.DOCTOR_NONCE) + .replace("$signature", signature) + } + + /** + * 执行Doctor HTTP请求 + */ + private def executeDoctorHttpRequest(url: String, params: util.Map[String, AnyRef]): String = { + val httpPost = new HttpPost(url) + // 添加通用参数 + params.put("cluster", EntranceConfiguration.DOCTOR_CLUSTER) + + val json = BDPJettyServerHelper.gson.toJson(params) + val requestConfig = RequestConfig + .custom() + .setConnectTimeout(EntranceConfiguration.DOCTOR_REQUEST_TIMEOUT) + .setConnectionRequestTimeout(EntranceConfiguration.DOCTOR_REQUEST_TIMEOUT) + .setSocketTimeout(EntranceConfiguration.DOCTOR_REQUEST_TIMEOUT) + .build() + + val entity = new StringEntity( + json, + ContentType.create(ContentType.APPLICATION_JSON.getMimeType, StandardCharsets.UTF_8.toString) + ) + entity.setContentEncoding(StandardCharsets.UTF_8.toString) + httpPost.setConfig(requestConfig) + httpPost.setEntity(entity) + + val execute = httpClient.execute(httpPost) + 
EntityUtils.toString(execute.getEntity, StandardCharsets.UTF_8.toString) + } + + /** + * 解析Doctor响应结果 + */ + private def parseDoctorResponse( + responseStr: String, + startTime: Long, + request: DoctorRequest, + logAppender: java.lang.StringBuilder + ): DoctorResponse = { + try { + val endTime = System.currentTimeMillis() + val responseMapJson: Map[String, Object] = + BDPJettyServerHelper.gson.fromJson(responseStr, classOf[Map[_, _]]) + + if (MapUtils.isNotEmpty(responseMapJson) && responseMapJson.containsKey("data")) { + val dataMap = MapUtils.getMap(responseMapJson, "data") + val duration = (endTime - startTime) / 1000.0 + + // 根据不同的API返回不同的结果 + if (request.apiUrl.contains("plaintext")) { + // 敏感信息检查API + val sensitive = dataMap.get("sensitive").toString.toBoolean + val reason = dataMap.get("reason").toString + logInfo( + s"${request.successMessage}: $sensitive, This decision took $duration seconds", + logAppender + ) + DoctorResponse( + success = true, + result = sensitive.toString, + reason = reason, + duration = duration + ) + } else { + // 动态引擎选择API + val engineType = dataMap.get("engine").toString + val reason = dataMap.get("reason").toString + logInfo( + s"${request.successMessage}: $engineType, Hit rules: $reason, This decision took $duration seconds", + logAppender + ) + DoctorResponse(success = true, result = engineType, reason = reason, duration = duration) + } + } else { + throw new EntranceRPCException( + EntranceErrorCodeSummary.DOCTORIS_ERROR.getErrorCode, + EntranceErrorCodeSummary.DOCTORIS_ERROR.getErrorDesc + ) + } + } catch { + case e: Exception => + logger.warn(s"Doctoris返回数据解析失败:json: $responseStr", e) + logInfo(s"${request.exceptionMessage}, using default: ${request.defaultValue}", logAppender) + DoctorResponse(success = false, result = request.defaultValue) + } + } + + /** + * 记录日志信息 + */ + private def logInfo(message: String, logAppender: java.lang.StringBuilder): Unit = { + logAppender.append(LogUtils.generateInfo(s"$message\n")) + } + +} 
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala index ec29128889c..e6cfbe49429 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala @@ -17,6 +17,7 @@ package org.apache.linkis.entrance.utils +import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.exception.ErrorException import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration @@ -45,7 +46,7 @@ import com.google.common.net.InetAddresses object JobHistoryHelper extends Logging { private val sender = - Sender.getSender(EntranceConfiguration.JOBHISTORY_SPRING_APPLICATION_NAME.getValue) + Sender.getSender(Configuration.JOBHISTORY_SPRING_APPLICATION_NAME.getValue) private val SUCCESS_FLAG = 0 @@ -69,6 +70,11 @@ object JobHistoryHelper extends Logging { else task.getStatus } + def getProgressByTaskID(taskID: Long): String = { + val task = getTaskByTaskID(taskID) + if (task == null) "0" else task.getProgress + } + def getRequestIpAddr(req: HttpServletRequest): String = { val addrList = List( Option(req.getHeader("x-forwarded-for")).getOrElse("").split(",")(0), @@ -123,6 +129,143 @@ object JobHistoryHelper extends Logging { sender.ask(jobReqBatchUpdate) } + /** + * Get all consume queue task and batch update instances(获取所有消费队列中的任务进行批量更新) + * + * @param taskIdList + * @param retryWhenUpdateFail + */ + def updateAllConsumeQueueTask( + taskIdList: util.List[Long], + retryWhenUpdateFail: Boolean = false + ): Unit = { + + if (taskIdList.isEmpty) return + + val updateTaskIds = new util.ArrayList[Long]() + + if ( + 
EntranceConfiguration.ENTRANCE_UPDATE_BATCH_SIZE.getValue > 0 && + taskIdList.size() > EntranceConfiguration.ENTRANCE_UPDATE_BATCH_SIZE.getValue + ) { + for (i <- 0 until EntranceConfiguration.ENTRANCE_UPDATE_BATCH_SIZE.getValue) { + updateTaskIds.add(taskIdList.get(i)) + } + } else { + updateTaskIds.addAll(taskIdList) + } + val list = new util.ArrayList[Long]() + list.addAll(taskIdList) + try { + val successTaskIds = updateBatchInstancesEmpty(updateTaskIds) + if (retryWhenUpdateFail) { + list.removeAll(successTaskIds) + } else { + list.removeAll(updateTaskIds) + } + } catch { + case e: Exception => + logger.warn("update batch instances failed, wait for retry", e) + Thread.sleep(1000) + } + updateAllConsumeQueueTask(list, retryWhenUpdateFail) + + } + + /** + * Batch update instances(批量更新instances字段) + * + * @param taskIdList + * @return + */ + def updateBatchInstancesEmpty(taskIdList: util.List[Long]): util.List[Long] = { + val jobReqList = new util.ArrayList[JobRequest]() + taskIdList.asScala.foreach(taskID => { + val jobRequest = new JobRequest + jobRequest.setId(taskID) + jobRequest.setInstances("") + jobReqList.add(jobRequest) + }) + val jobReqBatchUpdate = JobReqBatchUpdate(jobReqList) + Utils.tryCatch { + val response = sender.ask(jobReqBatchUpdate) + response match { + case resp: util.List[JobRespProtocol] => + // todo filter success data, rpc have bug + // resp.asScala + // .filter(r => + // r.getStatus == SUCCESS_FLAG && r.getData.containsKey(JobRequestConstants.JOB_ID) + // ) + // .map(_.getData.get(JobRequestConstants.JOB_ID).asInstanceOf[java.lang.Long]) + // .toList + + taskIdList + case _ => + throw JobHistoryFailedException( + "update batch instances from jobhistory not a correct List type" + ) + } + } { + case errorException: ErrorException => throw errorException + case e: Exception => + val e1 = + JobHistoryFailedException( + s"update batch instances ${taskIdList.asScala.mkString(",")} error" + ) + e1.initCause(e) + throw e + } + } + + /** + * 
query wait for failover task(获取待故障转移的任务) + * + * @param reqMap + * @param statusList + * @param startTimestamp + * @param limit + * @return + */ + def queryWaitForFailoverTask( + reqMap: util.Map[String, java.lang.Long], + statusList: util.List[String], + startTimestamp: Long, + limit: Int + ): util.List[JobRequest] = { + val requestFailoverJob = RequestFailoverJob(reqMap, statusList, startTimestamp, limit) + val tasks = Utils.tryCatch { + val response = sender.ask(requestFailoverJob) + response match { + case responsePersist: JobRespProtocol => + val status = responsePersist.getStatus + if (status != SUCCESS_FLAG) { + logger.error(s"query from jobHistory status failed, status is $status") + throw JobHistoryFailedException("query from jobHistory status failed") + } + val data = responsePersist.getData + data.get(JobRequestConstants.JOB_HISTORY_LIST) match { + case tasks: List[JobRequest] => + tasks.asJava + case _ => + throw JobHistoryFailedException( + s"query from jobhistory not a correct List type, instances ${reqMap.keySet()}" + ) + } + case _ => + logger.error("get query response incorrectly") + throw JobHistoryFailedException("get query response incorrectly") + } + } { + case errorException: ErrorException => throw errorException + case e: Exception => + val e1 = + JobHistoryFailedException(s"query failover task error, instances ${reqMap.keySet()} ") + e1.initCause(e) + throw e + } + tasks + } + private def getTaskByTaskID(taskID: Long): JobRequest = { val jobRequest = new JobRequest jobRequest.setId(taskID) @@ -176,15 +319,15 @@ object JobHistoryHelper extends Logging { val ecResourceMap = if (resourceInfo == null) new util.HashMap[String, ResourceWithStatus] else resourceInfo if (resourceMap != null) { - resourceMap.asInstanceOf[util.HashMap[String, ResourceWithStatus]].putAll(ecResourceMap) + resourceMap.asInstanceOf[util.Map[String, ResourceWithStatus]].putAll(ecResourceMap) } else { metricsMap.put(TaskConstant.JOB_YARNRESOURCE, ecResourceMap) } - var 
engineInstanceMap: util.HashMap[String, AnyRef] = null + var engineInstanceMap: util.Map[String, AnyRef] = null if (metricsMap.containsKey(TaskConstant.JOB_ENGINECONN_MAP)) { engineInstanceMap = metricsMap .get(TaskConstant.JOB_ENGINECONN_MAP) - .asInstanceOf[util.HashMap[String, AnyRef]] + .asInstanceOf[util.Map[String, AnyRef]] } else { engineInstanceMap = new util.HashMap[String, AnyRef]() metricsMap.put(TaskConstant.JOB_ENGINECONN_MAP, engineInstanceMap) @@ -194,7 +337,7 @@ object JobHistoryHelper extends Logging { val ticketId = infoMap.get(TaskConstant.TICKET_ID).asInstanceOf[String] val engineExtraInfoMap = engineInstanceMap .getOrDefault(ticketId, new util.HashMap[String, AnyRef]) - .asInstanceOf[util.HashMap[String, AnyRef]] + .asInstanceOf[util.Map[String, AnyRef]] engineExtraInfoMap.putAll(infoMap) engineInstanceMap.put(ticketId, engineExtraInfoMap) } else { @@ -217,6 +360,9 @@ object JobHistoryHelper extends Logging { if (null != infoMap && infoMap.containsKey(TaskConstant.ENGINE_INSTANCE)) { metricsMap.put(TaskConstant.ENGINE_INSTANCE, infoMap.get(TaskConstant.ENGINE_INSTANCE)) } + if (null != infoMap && infoMap.containsKey(TaskConstant.JOB_IS_REUSE)) { + metricsMap.put(TaskConstant.JOB_IS_REUSE, infoMap.get(TaskConstant.JOB_IS_REUSE)) + } } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/SafeUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/SafeUtils.scala new file mode 100644 index 00000000000..eda8b13fc43 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/SafeUtils.scala @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.utils + +import org.apache.linkis.common.utils.Logging + +import java.util.regex.Pattern + +object SafeUtils extends Logging { + + private val DANGEROUS_CODE_PATTERN = "/etc/passwd|" + + "/etc/shadow|" + + "/etc/group|" + + "open\\(\\s*['\"]/etc/[^'\"]+['\"]\\s*,|" + + "subprocess|" + + "os\\.system|" + + "os\\.popen|" + + "shutil\\.execute|" + + "eval|`.*?`|" + + "import\\s+os\\.env|" + + "import\\s+os\\.getlogin|" + + "import\\s+os\\.getpid|" + + "import\\s+os\\.getppid|" + + "import\\s+os\\.getcwd|" + + "import\\s+os\\.getexecname|" + + "import\\s+os\\.startfile|" + + "import\\s+os\\.fork|" + + "import\\s+os\\.kill|" + + "import\\s+os\\.wait|" + + "import\\s+os\\.waitpid|" + + "import\\s+os\\.wait3|" + + "import\\s+os\\.wait4|" + + "import\\s+os\\.confstr|" + + "import\\s+os\\.sysconf|" + + "import\\s+os\\.uname|" + + "import\\s+os\\.urandom|" + + "import\\s+os\\.chroot|" + + "import\\s+os\\.setuid|" + + "import\\s+os\\.setgid|" + + "import\\s+os\\.setgroups|" + + "import\\s+os\\.initgroups|" + + "import\\s+os\\.getgrouplist|" + + "import\\s+os\\.getlogin|" + + "import\\s+os\\.getpgid|" + + "import\\s+os\\.getpgrp|" + + "import\\s+os\\.getsid|" + + "import\\s+os\\.setpgid|" + + "import\\s+os\\.setpgrp|" + + "import\\s+os\\.setsid|" + + "import\\s+os\\.forkpty|" + + "import\\s+os\\.setreuid|" + + "import\\s+os\\.setregid|" + + 
"import\\s+os\\.getresuid|" + + "import\\s+os\\.getresgid" + + private val ANNOTATION_PATTERN = "\\s*#.*$" + + private val SAFETY_PASS = "SAFETY_PASS" + + def isCodeSafe(code: String): Boolean = { + var isSafe = true + // 在匹配高危代码前,先移除注释 + val commentPattern = Pattern.compile(ANNOTATION_PATTERN, Pattern.MULTILINE) + val cleanCode = commentPattern.matcher(code).replaceAll("") + val code_pattern = + Pattern.compile(DANGEROUS_CODE_PATTERN, Pattern.DOTALL | Pattern.CASE_INSENSITIVE) + val code_matcher = code_pattern.matcher(cleanCode) + while (code_matcher.find) { + isSafe = false + val mather = commentPattern.matcher(code) + while (mather.find) + if (mather.group.toUpperCase().contains(SAFETY_PASS)) isSafe = true + } + isSafe + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/SQLExplainTest.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/SQLExplainTest.java new file mode 100644 index 00000000000..c5efb5633ea --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/SQLExplainTest.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class SQLExplainTest { + + @Test + void isSelectCmdNoLimit() { + + String code = "SELECT * from dual WHERE (1=1)LIMIT 1;"; + boolean res = SQLExplain.isSelectCmdNoLimit(code); + Assertions.assertEquals(false, res); + + code = "SELECT * from dual"; + res = SQLExplain.isSelectCmdNoLimit(code); + Assertions.assertEquals(true, res); + + code = "SELECT * from dual LIMIT 1;"; + res = SQLExplain.isSelectCmdNoLimit(code); + Assertions.assertEquals(false, res); + } + + @Test + void isSelectOverLimit() { + String code = "SELECT * from dual WHERE (1=1)LIMIT 5001;"; + boolean res = SQLExplain.isSelectOverLimit(code); + Assertions.assertEquals(true, res); + + code = "SELECT * from dual"; + res = SQLExplain.isSelectOverLimit(code); + Assertions.assertEquals(false, res); + + code = "SELECT * from dual LIMIT 4000;"; + res = SQLExplain.isSelectOverLimit(code); + Assertions.assertEquals(false, res); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtilsTest.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtilsTest.java new file mode 100644 index 00000000000..7a6846df577 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtilsTest.java @@ -0,0 +1,136 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl; + +import org.apache.linkis.governance.common.entity.job.JobRequest; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +class TemplateConfUtilsTest { + + @Test + void getCustomTemplateConfName() { + JobRequest js = new JobRequest(); + StringBuilder logBuilder = new StringBuilder(); + String sqlCode = + "" + + "--注解\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + js.setExecutionCode(sqlCode); + String res = TemplateConfUtils.getCustomTemplateConfName(js, "sql", logBuilder); + assertEquals(res, ""); + + String sqlCode2 = + "" + + "---@set 123=注解\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + js.setExecutionCode(sqlCode2); + res = TemplateConfUtils.getCustomTemplateConfName(js, "sql", logBuilder); + assertEquals(res, ""); + + String sqlCode3 = + "" + + "---@set ec.resource.name=345\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + "---@set ec.resource.name=456\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + js.setExecutionCode(sqlCode3); + res = TemplateConfUtils.getCustomTemplateConfName(js, "sql", logBuilder); + assertEquals(res, "345"); + + String sqlCode4 = + "" + + "---@set 
ec.resource.name= name1 \n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + js.setExecutionCode(sqlCode4); + res = TemplateConfUtils.getCustomTemplateConfName(js, "sql", logBuilder); + assertEquals(res, "name1"); + + String sqlCode5 = + "" + + "##@set ec.resource.name=pyname1\n" + + "select * from table;\n" + + " --注解 \n" + + "#注解\n" + + "##@set ec.resource.name= 123 \n" + + " select \"--注解\" as test\n" + + "#@set yy=123\n" + + " #注解"; + js.setExecutionCode(sqlCode5); + res = TemplateConfUtils.getCustomTemplateConfName(js, "python", logBuilder); + assertEquals(res, "pyname1"); + + String sqlCode6 = + "" + + "///@set ec.resource.name= scalaname1 \n" + + " select \"//注解\" as test\n" + + "//@set yy=123\n" + + " #注解"; + js.setExecutionCode(sqlCode6); + res = TemplateConfUtils.getCustomTemplateConfName(js, "scala", logBuilder); + assertEquals(res, "scalaname1"); + + String sqlCode7 = + "" + + "---@set ec.resource.name= hqlname1 \n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + js.setExecutionCode(sqlCode7); + res = TemplateConfUtils.getCustomTemplateConfName(js, "hql", logBuilder); + assertEquals(res, "hqlname1"); + + String sqlCode8 = + "---@set ec.resource.name=linkis_test2;\n" + + " ---@set ec.resource.name=scriptis_test hive;\n" + + " select * from dss autotest.demo data limit 100;"; + js.setExecutionCode(sqlCode8); + res = TemplateConfUtils.getCustomTemplateConfName(js, "hql", logBuilder); + assertEquals(res, "linkis_test2"); + } + + @Test + void getCustomTemplateConfName2() { + JobRequest js = new JobRequest(); + StringBuilder logBuilder = new StringBuilder(); + String sqlCode9 = + "---@set ec.resource.name=linkis_test2;\r\n---@set ec.resource.name=scriptis_test_hive;\r\n--@set limitn=100\r\nselect * from dss_autotest.demo_data limit ${limitn};\r\n"; + js.setExecutionCode(sqlCode9); + String res = TemplateConfUtils.getCustomTemplateConfName(js, "hql", logBuilder); + assertEquals(res, "linkis_test2"); + } +} diff 
--git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java new file mode 100644 index 00000000000..635a083d364 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.interceptor.impl; + +import java.util.Arrays; + +import org.junit.jupiter.api.Test; +import org.junit.platform.commons.util.StringUtils; + +import static org.junit.jupiter.api.Assertions.*; + +public class TestCommentHelper { + String sqlCode = + "" + + "--注解\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + String scalaCode = + "" + + "// 注解\n" + + "print(1+1)\n" + + "//@set yy=123\n" + + " print(2)\n" + + " // 注解 \n" + + "// test\n" + + "print(\"//注解测试\")"; + + String scalaCodeRes = "print(1+1)\n" + "print(2)\n" + "print(\"//注解测试\")"; + + @Test + void sqlDealCommentTest() { + String code = SQLCommentHelper.dealComment(sqlCode); + // System.out.println(code); + } + + @Test + void scalaDealCommentTest() { + String code = ScalaCommentHelper.dealComment(scalaCode); + String[] lines = + Arrays.stream(code.split("\n")) + .map(String::trim) + .filter(x -> StringUtils.isNotBlank(x)) + .toArray(String[]::new); + String result = String.join("\n", lines); + // assertEquals(result,scalaCodeRes); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java new file mode 100644 index 00000000000..7c9f334a7e9 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java @@ -0,0 +1,137 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl; + +import org.apache.linkis.entrance.log.Cache; +import org.apache.linkis.entrance.log.CacheLogReader; +import org.apache.linkis.entrance.log.HDFSCacheLogWriter; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.UUID; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import oshi.util.FileUtil; + +class TestHDFSCacheLogWriter { + + @Test + void write() throws IOException { + + Cache cache = new Cache(5); + String fileName = UUID.randomUUID().toString().replace("-", "") + "-test.log"; + String username = System.getProperty("user.name"); + String parentPath = System.getProperty("java.io.tmpdir") + File.separator + username; + String logPath = parentPath + File.separator + fileName; + System.out.println(logPath); + String chartSet = "utf-8"; + + File file = new File(parentPath); + file.mkdirs(); + + File logfile = new File(logPath); + logfile.createNewFile(); // NOSONAR + + HDFSCacheLogWriter logWriter = + new HDFSCacheLogWriter( + // "D:\\DataSphere\\linkis\\docs\\test.log", + logPath, chartSet, cache, username); + + String[] msgArr = + new String[] { + "1", "2", "3", "4", "5", "6", + "7", "8", "9", "10", "11", "12", + "13", "14", "15", "16", "17", "18", + "19", "20", "21", "22" + }; + + List msgList = 
new ArrayList(Arrays.asList(msgArr)); + String msg = String.join("\n", msgList); + + logWriter.write(msg); + + CacheLogReader logReader = new CacheLogReader(logPath, chartSet, cache, username); + String[] logs = new String[4]; + int fromLine = 1; + int size = 1000; + int retFromLine = logReader.readArray(logs, fromLine, size); + Assertions.assertEquals(msgArr.length, retFromLine); + logWriter.flush(); + List list = FileUtil.readFile(logPath); + String res = String.join("\n", list); + Assertions.assertEquals(res, msg); + } + + @Test + void write2() throws IOException, InterruptedException { + + Cache cache = new Cache(30); + String fileName = UUID.randomUUID().toString().replace("-", "") + "-test.log"; + String username = System.getProperty("user.name"); + String parentPath = System.getProperty("java.io.tmpdir") + File.separator + username; + String logPath = parentPath + File.separator + fileName; + System.out.println(logPath); + String chartSet = "utf-8"; + + File file = new File(parentPath); + file.mkdirs(); + + File logfile = new File(logPath); + logfile.createNewFile(); // NOSONAR + + HDFSCacheLogWriter logWriter = + new HDFSCacheLogWriter( + // "D:\\DataSphere\\linkis\\docs\\test.log", + logPath, chartSet, cache, username); + + String[] msgArr = + new String[] { + "1", "2", "3", "4", "5", "6", + "7", "8", "9", "10", "11", "12", + "13", "14", "15", "16", "17", "18", + "19", "20", "21", "22" + }; + + List msgList = new ArrayList(Arrays.asList(msgArr)); + String msg = String.join("\n", msgList); + + logWriter.write(msg); + + Thread.sleep(4 * 1000); // NOSONAR + + logWriter.write(msg); + + CacheLogReader logReader = new CacheLogReader(logPath, chartSet, cache, username); + String[] logs = new String[4]; + int fromLine = 1; + int size = 1000; + int retFromLine = logReader.readArray(logs, fromLine, size); + Assertions.assertEquals(msgArr.length * 2, retFromLine); + Assertions.assertEquals(msg + "\n" + msg, logs[3]); + + logWriter.flush(); + + List list = 
FileUtil.readFile(logPath); + String res = String.join("\n", list); + Assertions.assertEquals(msg + "\n" + msg, res); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/resources/linkis.properties b/linkis-computation-governance/linkis-entrance/src/test/resources/linkis.properties new file mode 100644 index 00000000000..79800c99f2e --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/resources/linkis.properties @@ -0,0 +1,16 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +wds.linkis.entrance.log.push.interval.time=3000 \ No newline at end of file diff --git a/linkis-computation-governance/linkis-jdbc-driver/pom.xml b/linkis-computation-governance/linkis-jdbc-driver/pom.xml index cdf23c5f21f..28437d95a54 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/pom.xml +++ b/linkis-computation-governance/linkis-jdbc-driver/pom.xml @@ -41,6 +41,15 @@ + + + org.apache.maven.plugins + maven-surefire-plugin + + true + + + net.alchim31.maven scala-maven-plugin diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/LinkisSQLErrorCode.java b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/LinkisSQLErrorCode.java new file mode 100644 index 00000000000..442cbbfb435 --- /dev/null +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/LinkisSQLErrorCode.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ujes.jdbc; + +public enum LinkisSQLErrorCode { + BAD_URL(80000, "bad url"), + NOSUPPORT_DRIVER(80001, "this method not supported in driver"), + NOSUPPORT_CONNECTION(80002, "this method not supported in connection"), + NOSUPPORT_STATEMENT(80003, "this method not supported in statement"), + CONNECTION_CLOSED(80004, "Connection is closed!"), + STATEMENT_CLOSED(80005, "statement is closed!"), + SCHEMA_EMPTY(80006, "schema is empty!"), + SCHEMA_FAILED(80007, "Get schema failed!"), + QUERY_TIMEOUT(80008, "query has been timeout!"), + FILETYPE_ERROR(80009, "file type error"), + METADATATYPE_ERROR(80010, "metadata type error"), + NOSUPPORT_METADATA(80011, "this method not supported in DatabaseMetaData"), + NOPERMITION(80012, "This user has no permission to read this file!"), + PARAMS_NOT_FOUND(80013, "Parameter not found"), + ERRORINFO_FROM_JOBINFO(80014, "get errorinfo from jobInfo"), + RESULTSET_ROWERROR(80015, "row message error"), + NOSUPPORT_RESULTSET(80016, "this method not supported in resultSet"), + RESULTSET_NULL( + 80017, "resultset is null,try to run next() firstly to init ResultSet and MetaData"), + PREPARESTATEMENT_TYPEERROR(80018, "parameter type error"), + METADATA_EMPTY(80019, "data is empty"), + UNKNOWN_ERROR(80020, "unknown error"); + private String msg; + private int code; + + LinkisSQLErrorCode(int code, String msg) { + this.code = code; + this.msg = msg; + } + + public String getMsg() { + return msg; + } + + public int getCode() { + return code; + } + + public void setMsg(String msg) { + this.msg = msg; + } +} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java index 90e1f73563a..0bc0b08c520 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java +++ 
b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java @@ -42,15 +42,17 @@ public class UJESSQLDriver extends UJESSQLDriverMain implements Driver { static String DB_NAME = "DBNAME"; static String PARAMS = "PARAMS"; static String ENGINE_TYPE = "EngineType"; + static String ENGINE_VERSION = "EngineVersion"; static String USER = "user"; static String TOKEN_KEY = "key"; static String TOKEN_VALUE = "value"; static String PASSWORD = "password"; static boolean TABLEAU_SERVER = false; - static String LIMIT_ENABLED = "true"; - static String LIMIT = "limit"; + static String FIXED_SESSION = "fixedSession"; + static String ENABLE_MULTI_RESULT = "enableMultiResult"; + static String USE_SSL = "useSSL"; static String VERSION = "version"; static int DEFAULT_VERSION = 1; static String MAX_CONNECTION_SIZE = "maxConnectionSize"; @@ -58,7 +60,7 @@ public class UJESSQLDriver extends UJESSQLDriverMain implements Driver { static String ENABLE_DISCOVERY = "enableDiscovery"; static String ENABLE_LOADBALANCER = "enableLoadBalancer"; static String CREATOR = "creator"; - + static String TABLEAU = "tableau"; static String VARIABLE_HEADER = "var:"; static String PARAM_SPLIT = "&"; static String KV_SPLIT = "="; diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLErrorCode.java b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLErrorCode.java deleted file mode 100644 index fc283d8fbe2..00000000000 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLErrorCode.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ujes.jdbc; - -public enum UJESSQLErrorCode { - BAD_URL(80000, "bad url"), - NOSUPPORT_DRIVER(80001, "this method not supported in driver"), - NOSUPPORT_CONNECTION(80002, "this method not supported in connection"), - NOSUPPORT_STATEMENT(80003, "this method not supported in statement"), - CONNECTION_CLOSED(80004, "Connection is closed!"), - STATEMENT_CLOSED(80005, "statement is closed!"), - SCHEMA_EMPTY(80006, "schema is empty!"), - SCHEMA_FAILED(80007, "Get schema failed!"), - QUERY_TIMEOUT(80008, "query has been timeout!"), - FILETYPE_ERROR(80009, "file type error"), - METADATATYPE_ERROR(80010, "metadata type error"), - NOSUPPORT_METADATA(80011, "this method not supported in DatabaseMetaData"), - NOPERMITION(80012, "This user has no permission to read this file!"), - PARAMS_NOT_FOUND(80013, "Parameter not found"), - ERRORINFO_FROM_JOBINFO(80014, "get errorinfo from jobInfo"), - RESULTSET_ROWERROR(80015, "row message error"), - NOSUPPORT_RESULTSET(80016, "this method not supported in resultSet"), - RESULTSET_NULL( - 80017, "resultset is null,try to run next() firstly to init ResultSet and MetaData"), - PREPARESTATEMENT_TYPEERROR(80018, "parameter type error"), - METADATA_EMPTY(80019, "data is empty"); - private String msg; - private int code; - - UJESSQLErrorCode(int code, String msg) { - this.code = code; - this.msg = msg; - } - - public String getMsg() { 
- return msg; - } - - public int getCode() { - return code; - } -} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/utils/JDBCUtils.java b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/utils/JDBCUtils.java index 061acd42f90..3e1e7e31821 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/utils/JDBCUtils.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/utils/JDBCUtils.java @@ -17,10 +17,16 @@ package org.apache.linkis.ujes.jdbc.utils; +import org.apache.linkis.common.utils.Utils; + +import java.util.concurrent.atomic.AtomicInteger; + public class JDBCUtils { private static final char SEARCH_STRING_ESCAPE = '\\'; + public static final AtomicInteger idCreator = new AtomicInteger(); + public static String convertPattern(final String pattern) { if (pattern == null) { return ".*"; @@ -41,8 +47,6 @@ public static String convertPattern(final String pattern) { continue; } else if (c == '%') { result.append(".*"); - } else if (c == '_') { - result.append('.'); } else { result.append(Character.toLowerCase(c)); } @@ -52,4 +56,8 @@ public static String convertPattern(final String pattern) { return result.toString(); } } + + public static String getUniqId() { + return Utils.getLocalHostname() + "_" + JDBCUtils.idCreator.getAndIncrement(); + } } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLConnection.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLConnection.scala new file mode 100644 index 00000000000..e9c07400cdd --- /dev/null +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLConnection.scala @@ -0,0 +1,498 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ujes.jdbc + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.governance.common.constant.job.JobRequestConstants +import org.apache.linkis.manager.label.constant.LabelKeyConstant +import org.apache.linkis.manager.label.entity.engine.{EngineType, EngineTypeLabel, RunType} +import org.apache.linkis.manager.label.utils.EngineTypeLabelCreator +import org.apache.linkis.ujes.client.UJESClient +import org.apache.linkis.ujes.client.request.JobSubmitAction +import org.apache.linkis.ujes.client.response.JobExecuteResult +import org.apache.linkis.ujes.jdbc.UJESSQLDriverMain._ +import org.apache.linkis.ujes.jdbc.utils.JDBCUtils + +import org.apache.commons.lang3.StringUtils + +import java.{sql, util} +import java.sql.{ + Blob, + CallableStatement, + Clob, + Connection, + DatabaseMetaData, + NClob, + PreparedStatement, + ResultSet, + Savepoint, + SQLException, + SQLWarning, + SQLXML, + Statement, + Struct +} +import java.util.Properties +import java.util.concurrent.Executor + +import scala.collection.JavaConverters._ + +class LinkisSQLConnection(private[jdbc] val ujesClient: UJESClient, props: Properties) + extends Connection + with Logging { + + private[jdbc] var creator = "JDBCDriver" + + 
private[jdbc] var tableauFlag = false + + private[jdbc] val variableMap = { + val params = props.getProperty(PARAMS) + val map = new util.HashMap[String, AnyRef] + if (params != null) { + params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { + case Array(k, v) if k.startsWith(VARIABLE_HEADER) => + map.put(k.substring(VARIABLE_HEADER.length), v) + case Array(CREATOR, v) => + creator = v + case _ => + } + } + map + } + + def isTableau(): Boolean = { + val params = props.getProperty(PARAMS) + if (params != null) { + params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { + case Array(TABLEAU, v) => + tableauFlag = true + case _ => + } + } + tableauFlag + } + + private[jdbc] val dbName = props.getProperty(DB_NAME) + + private val runningSQLStatements = new util.LinkedList[Statement] + + private var closed = false + + private var inited = false + + private[jdbc] val user = props.getProperty(USER) + + private[jdbc] val serverURL = props.getProperty("URL") + + private[jdbc] val fixedSessionEnabled = + if ( + props + .containsKey(FIXED_SESSION) && "true".equalsIgnoreCase(props.getProperty(FIXED_SESSION)) + ) { + true + } else { + false + } + + private val connectionId = JDBCUtils.getUniqId() + + private val labelMap: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] + + private val startupParams: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] + + private val runtimeParams: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] + + private[jdbc] def getEngineType: EngineTypeLabel = { + + var engineType = EngineType.TRINO.toString + var engineVersion = "" + if (props.containsKey(PARAMS)) { + val params = props.getProperty(PARAMS) + if (params != null & params.length() > 0) { + params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { + case Array(k, v) => + if (k.equals(UJESSQLDriver.ENGINE_TYPE)) { + engineType = v + } else if (k.equals(UJESSQLDriver.ENGINE_VERSION)) { + engineVersion = v + } + + case _ => + } + } + } + if 
(StringUtils.isNotBlank(engineVersion)) { + val label = EngineTypeLabelCreator.createEngineTypeLabel(engineType) + label.setVersion(engineVersion) + label + } else { + EngineTypeLabelCreator.createEngineTypeLabel(engineType) + } + } + + private[jdbc] def throwWhenClosed[T](op: => T): T = + if (isClosed) throw new LinkisSQLException(LinkisSQLErrorCode.CONNECTION_CLOSED) + else op + + private def createStatementAndAdd[T <: Statement](op: => T): T = throwWhenClosed { + + val statement = op + runningSQLStatements.add(statement) + statement + } + + def getProps: Properties = props + + def removeStatement(statement: LinkisSQLStatement): Unit = runningSQLStatements.remove(statement) + + override def createStatement(): Statement = createStatementAndAdd(new LinkisSQLStatement(this)) + + override def prepareStatement(sql: String): LinkisSQLPreparedStatement = { + val statement = createStatementAndAdd(new LinkisSQLPreparedStatement(this, sql)) + statement.clearQuery() + statement + } + + override def createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement = { + if (resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) { + throw new SQLException( + "Statement with resultset concurrency " + resultSetConcurrency + " is not supported", + "HYC00" + ) + } + if (resultSetType == ResultSet.TYPE_SCROLL_SENSITIVE) { + throw new SQLException( + "Statement with resultset type " + resultSetType + " is not supported", + "HYC00" + ) + } + createStatementAndAdd(new LinkisSQLStatement(this)) + } + + override def prepareStatement(sql: String, autoGeneratedKeys: Int): PreparedStatement = + prepareStatement(sql) + + override def prepareStatement( + sql: String, + resultSetType: Int, + resultSetConcurrency: Int + ): PreparedStatement = prepareStatement(sql) + + override def getMetaData: DatabaseMetaData = throwWhenClosed(new UJESSQLDatabaseMetaData(this)) + + override def close(): Unit = { + runningSQLStatements.asScala.foreach { statement => Utils.tryQuietly(statement.close()) } 
+ closed = true + } + + override def isClosed: Boolean = closed + + override def setReadOnly(readOnly: Boolean): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setReadOnly not supported" + ) + + override def isReadOnly: Boolean = false + + override def setCatalog(catalog: String): Unit = throwWhenClosed() + + override def getCatalog: String = "" + + override def setTransactionIsolation(level: Int): Unit = {} + + override def getTransactionIsolation: Int = Connection.TRANSACTION_NONE + + override def getWarnings: SQLWarning = null + + override def clearWarnings(): Unit = {} + + override def setAutoCommit(autoCommit: Boolean): Unit = {} + + override def getAutoCommit: Boolean = true + + override def commit(): Unit = {} + + override def prepareCall(sql: String): CallableStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareCall not supported" + ) + + override def rollback(): Unit = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_CONNECTION, "rollback not supported") + + override def nativeSQL(sql: String): String = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_CONNECTION, "nativeSQL not supported") + + override def prepareCall( + sql: String, + resultSetType: Int, + resultSetConcurrency: Int + ): CallableStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareCall not supported" + ) + + override def getTypeMap: util.Map[String, Class[_]] = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "getTypeMap not supported" + ) + + override def setTypeMap(map: util.Map[String, Class[_]]): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setTypeMap not supported" + ) + + override def setHoldability(holdability: Int): Unit = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setHoldability not supported" + ) + + override def getHoldability: Int = 
0 + + override def setSavepoint(): Savepoint = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setSavepoint not supported" + ) + + override def setSavepoint(name: String): Savepoint = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setSavepoint not supported" + ) + + override def rollback(savepoint: Savepoint): Unit = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_CONNECTION, "rollback not supported") + + override def releaseSavepoint(savepoint: Savepoint): Unit = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "releaseSavepoint not supported" + ) + + override def createStatement( + resultSetType: Int, + resultSetConcurrency: Int, + resultSetHoldability: Int + ): Statement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createStatement not supported" + ) + + override def prepareStatement( + sql: String, + resultSetType: Int, + resultSetConcurrency: Int, + resultSetHoldability: Int + ): PreparedStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareStatement not supported" + ) + + override def prepareCall( + sql: String, + resultSetType: Int, + resultSetConcurrency: Int, + resultSetHoldability: Int + ): CallableStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareCall not supported" + ) + + override def prepareStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareStatement not supported" + ) + + override def prepareStatement(sql: String, columnNames: Array[String]): PreparedStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareStatement not supported" + ) + + override def createClob(): Clob = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createClob not supported" + ) + + 
override def createBlob(): Blob = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createBlob not supported" + ) + + override def createNClob(): NClob = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createNClob not supported" + ) + + override def createSQLXML(): SQLXML = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createSQLXML not supported" + ) + + override def isValid(timeout: Int): Boolean = true + + override def setClientInfo(name: String, value: String): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setClientInfo not supported" + ) + + override def setClientInfo(properties: Properties): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "properties not supported" + ) + + override def getClientInfo(name: String): String = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "getClientInfo not supported" + ) + + override def getClientInfo: Properties = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "getClientInfo not supported" + ) + + override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createArrayOf not supported" + ) + + override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createStruct not supported" + ) + + override def setSchema(schema: String): Unit = throwWhenClosed { + if (StringUtils.isBlank(schema)) { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "schema is empty!") + } + createStatement().execute("use " + schema) + } + + override def getSchema: String = throwWhenClosed { + val resultSet = createStatement().executeQuery("SELECT current_database()") + if (!resultSet.next()) { + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "Get schema failed!") + } + resultSet.getString(1) + } + + override def abort(executor: Executor): Unit = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_CONNECTION, "abort not supported") + + override def setNetworkTimeout(executor: Executor, milliseconds: Int): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setNetworkTimeout not supported" + ) + + override def getNetworkTimeout: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "getNetworkTimeout not supported" + ) + + override def unwrap[T](iface: Class[T]): T = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_CONNECTION, "unwrap not supported") + + override def isWrapperFor(iface: Class[_]): Boolean = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "isWrapperFor not supported" + ) + + def addLabels(labels: util.Map[String, AnyRef]): Unit = { + labelMap.putAll(labels) + } + + def addStartUpParams(params: util.Map[String, AnyRef]): Unit = { + startupParams.putAll(params) + } + + def addRuntimeParams(params: util.Map[String, AnyRef]): Unit = { + runtimeParams.putAll(params) + } + + def engineToCodeType(engine: String): String = { + val runType = EngineType.mapStringToEngineType(engine) match { + case EngineType.SPARK => RunType.SQL + case EngineType.HIVE => RunType.HIVE + case EngineType.REPL => RunType.REPL + case EngineType.DORIS => RunType.DORIS + case EngineType.TRINO => RunType.TRINO_SQL + case EngineType.PRESTO => RunType.PRESTO_SQL + case EngineType.NEBULA => RunType.NEBULA_SQL + case EngineType.ELASTICSEARCH => RunType.ES_SQL + case EngineType.JDBC => RunType.JDBC + case EngineType.PYTHON => RunType.SHELL + case _ => RunType.SQL + } + runType.toString + } + + private[jdbc] def toSubmit(code: String): JobExecuteResult = { + val engineTypeLabel = getEngineType + labelMap.put(LabelKeyConstant.ENGINE_TYPE_KEY, 
engineTypeLabel.getStringValue) + labelMap.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, s"$user-$creator") + labelMap.put(LabelKeyConstant.CODE_TYPE_KEY, engineToCodeType(engineTypeLabel.getEngineType)) + if (fixedSessionEnabled) { + labelMap.put(LabelKeyConstant.FIXED_EC_KEY, connectionId) + logger.info("Fixed session is enable session id is {}", connectionId) + } + + if (StringUtils.isNotBlank(dbName)) { + runtimeParams.put(JobRequestConstants.LINKIS_JDBC_DEFAULT_DB, dbName) + } + + val jobSubmitAction = JobSubmitAction.builder + .addExecuteCode(code) + .setStartupParams(startupParams) + .setUser(user) + .addExecuteUser(user) + .setLabels(labelMap) + .setRuntimeParams(runtimeParams) + .setVariableMap(variableMap) + .build + + val result = ujesClient.submit(jobSubmitAction) + if (result.getStatus != 0) { + throw new SQLException(result.getMessage) + } + result + } + + override def toString: String = "LinkisConnection_" + connectionId + +} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLException.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLException.scala new file mode 100644 index 00000000000..9cc3f3814f6 --- /dev/null +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLException.scala @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ujes.jdbc + +import java.sql.SQLException + +class LinkisSQLException(msg: String, code: String, vendorCode: Int) + extends SQLException(msg, code, vendorCode) { + + def this(errorCode: LinkisSQLErrorCode, msg: String) { + this(msg, errorCode.getCode.toString, 0) + } + + def this(errorCode: LinkisSQLErrorCode) { + this(errorCode.getMsg, errorCode.getCode.toString, 0) + } + + def this(msg: String, code: String) { + this(msg, code, 0) + } + +} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLPreparedStatement.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLPreparedStatement.scala new file mode 100644 index 00000000000..61a7020946a --- /dev/null +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLPreparedStatement.scala @@ -0,0 +1,351 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ujes.jdbc + +import java.io.{InputStream, Reader} +import java.net.URL +import java.sql.{ + Blob, + Clob, + Date, + NClob, + ParameterMetaData, + PreparedStatement, + Ref, + ResultSetMetaData, + RowId, + SQLXML, + Time, + Timestamp +} +import java.util +import java.util.Calendar + +class LinkisSQLPreparedStatement(ujesSQLConnection: LinkisSQLConnection, sql: String) + extends LinkisSQLStatement(ujesSQLConnection) + with PreparedStatement { + + private val parameters = new util.HashMap[Int, Any] + + private var parameterMetaData: ParameterMetaData = _ + + private var batchTimes = 0 + + private def updateSql(sql: String, parameters: util.HashMap[Int, Any]): String = { + if (!sql.contains("?")) { + sql + } else { + val newSql = new StringBuilder(sql) + for (paramLoc <- 1 to parameters.size()) { + if (parameters.containsKey(paramLoc)) { + val charIndex = getCharIndexFromSqlByParamLocation(newSql.toString(), '?', 1) + newSql.deleteCharAt(charIndex) + newSql.insert(charIndex, parameters.get(paramLoc).asInstanceOf[String]) + } + } + newSql.toString() + } + } + + private def getCharIndexFromSqlByParamLocation(sql: String, cchar: Char, paramLoc: Int): Int = { + var signalCount = 0 + var charIndex = -1 + var num = 0 + for (i <- 0 to sql.length - 1 if charIndex == -1) { + val c = sql.charAt(i) + if (c != '\'' && c != '\\') { + if (c == cchar && signalCount % 2 == 0) { + num += 1 + if (num == paramLoc) { + charIndex = i + } + } + } else { + signalCount += signalCount + } + } + charIndex + } + + override def executeQuery(): 
UJESSQLResultSet = { + super.executeQuery(updateSql(sql, parameters)) + } + + override def executeUpdate(): Int = { + super.executeUpdate(updateSql(sql, parameters)) + } + + override def setNull(parameterIndex: Int, sqlType: Int): Unit = { + parameters.put(parameterIndex, "NULL") + } + + override def setBoolean(parameterIndex: Int, x: Boolean): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setByte(parameterIndex: Int, x: Byte): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setShort(parameterIndex: Int, x: Short): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setInt(parameterIndex: Int, x: Int): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setLong(parameterIndex: Int, x: Long): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setFloat(parameterIndex: Int, x: Float): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setDouble(parameterIndex: Int, x: Double): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setBigDecimal(parameterIndex: Int, x: java.math.BigDecimal): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setString(parameterIndex: Int, x: String): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setBytes(parameterIndex: Int, x: Array[Byte]): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setDate(parameterIndex: Int, x: Date): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setTime(parameterIndex: Int, x: Time): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setTimestamp(parameterIndex: Int, x: Timestamp): Unit = { + parameters.put(parameterIndex, x + "") + } + + override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def 
setUnicodeStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def clearParameters(): Unit = { + parameters.clear() + } + + override def setObject(parameterIndex: Int, x: scala.Any, targetSqlType: Int): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setObject(parameterIndex: Int, x: scala.Any): Unit = { + x match { + case null => setNull(parameterIndex, 0) + case x: String => setString(parameterIndex, x) + case x: Short => setShort(parameterIndex, x) + case x: Int => setInt(parameterIndex, x) + case x: Long => setLong(parameterIndex, x) + case x: Float => setFloat(parameterIndex, x) + case x: Double => setDouble(parameterIndex, x) + case x: Boolean => setBoolean(parameterIndex, x) + case x: Byte => setByte(parameterIndex, x) + case x: Char => setString(parameterIndex, x.toString) + case x: Timestamp => setTimestamp(parameterIndex, x) + case _ => + throw new LinkisSQLException( + LinkisSQLErrorCode.PREPARESTATEMENT_TYPEERROR, + s"Can''t infer the SQL type to use for an instance of ${x.getClass.getName}. 
Use setObject() with an explicit Types value to specify the type to use" + ) + } + } + + override def execute(): Boolean = { + val res = super.execute(updateSql(sql, parameters)) + for (i <- 1 to batchTimes) { + super.execute(updateSql(sql, parameters)) + } + res + } + + override def executeBatch(): Array[Int] = { + + for (i <- 0 to batchTimes) { + super.execute(updateSql(sql, parameters)) + } + Array(1, 1) + } + + override def addBatch(): Unit = { + this.batchTimes = this.batchTimes + 1 + } + + override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Int): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setRef(parameterIndex: Int, x: Ref): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setBlob(parameterIndex: Int, x: Blob): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setClob(parameterIndex: Int, x: Clob): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setArray(parameterIndex: Int, x: java.sql.Array): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def getMetaData: ResultSetMetaData = { + if (super.getResultSet == null) { + return null + } + super.getResultSet.getMetaData + } + + override def setDate(parameterIndex: Int, x: Date, cal: Calendar): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setTime(parameterIndex: Int, x: Time, cal: Calendar): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setTimestamp(parameterIndex: Int, x: Timestamp, cal: Calendar): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setNull(parameterIndex: Int, sqlType: Int, typeName: String): Unit = { + parameters.put(parameterIndex, "NULL") + } + + 
override def setURL(parameterIndex: Int, x: URL): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def getParameterMetaData: ParameterMetaData = { + synchronized { + if (null == this.parameterMetaData) { + this.parameterMetaData = new LinkisParameterMetaData(sql.count(_ == '?')) + } + } + this.parameterMetaData + } + + override def setRowId(parameterIndex: Int, x: RowId): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setNString(parameterIndex: Int, value: String): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setNCharacterStream(parameterIndex: Int, value: Reader, length: Long): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setNClob(parameterIndex: Int, value: NClob): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setClob(parameterIndex: Int, reader: Reader, length: Long): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setBlob(parameterIndex: Int, inputStream: InputStream, length: Long): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setNClob(parameterIndex: Int, reader: Reader, length: Long): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setSQLXML(parameterIndex: Int, xmlObject: SQLXML): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setObject( + parameterIndex: Int, + x: scala.Any, + targetSqlType: Int, + scaleOrLength: Int + ): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Long): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override 
def setBinaryStream(parameterIndex: Int, x: InputStream, length: Long): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Long): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setAsciiStream(parameterIndex: Int, x: InputStream): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setBinaryStream(parameterIndex: Int, x: InputStream): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setCharacterStream(parameterIndex: Int, reader: Reader): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setNCharacterStream(parameterIndex: Int, value: Reader): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setClob(parameterIndex: Int, reader: Reader): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setBlob(parameterIndex: Int, inputStream: InputStream): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def setNClob(parameterIndex: Int, reader: Reader): Unit = { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) + } + + override def getResultSetType: Int = { + super.getResultSetType + } + +} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLStatement.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLStatement.scala new file mode 100644 index 00000000000..e3a1475d2b9 --- /dev/null +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLStatement.scala @@ -0,0 +1,375 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ujes.jdbc + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.governance.common.entity.ExecutionNodeStatus +import org.apache.linkis.ujes.client.request.OpenLogAction +import org.apache.linkis.ujes.client.response.JobExecuteResult +import org.apache.linkis.ujes.jdbc.hook.JDBCDriverPreExecutionHook + +import org.apache.commons.lang3.StringUtils + +import java.sql.{Connection, ResultSet, SQLException, SQLWarning, Statement} +import java.util +import java.util.concurrent.TimeUnit + +import scala.concurrent.TimeoutException +import scala.concurrent.duration.Duration + +class LinkisSQLStatement(private[jdbc] val ujesSQLConnection: LinkisSQLConnection) + extends Statement + with Logging { + + private var jobExecuteResult: JobExecuteResult = _ + + private val openedResultSets: util.ArrayList[UJESSQLResultSet] = + new util.ArrayList[UJESSQLResultSet]() + + private var resultSet: UJESSQLResultSet = _ + private var closed = false + private var maxRows: Int = 0 + private var fetchSize = 100 + private var queryTimeout = 0 + + private var logPath: String = null + + private var queryEnd = false + + private var logFromLen = 0 + private val logSize = 100 + + private[jdbc] def 
throwWhenClosed[T](op: => T): T = ujesSQLConnection.throwWhenClosed { + if (isClosed) throw new LinkisSQLException(LinkisSQLErrorCode.STATEMENT_CLOSED) + else op + } + + override def executeQuery(sql: String): UJESSQLResultSet = { + if (!execute(sql)) throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) + resultSet + } + + override def executeUpdate(sql: String): Int = { + execute(sql) + 0 + } + + override def close(): Unit = { + closed = true + clearQuery() + } + + def clearQuery(): Unit = { + if (jobExecuteResult != null && !queryEnd) { + Utils.tryAndWarn(ujesSQLConnection.ujesClient.kill(jobExecuteResult)) + jobExecuteResult = null + } + if (resultSet != null) { + Utils.tryAndWarn(resultSet.close()) + resultSet = null + } + } + + override def getMaxFieldSize: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "getMaxFieldSize not supported" + ) + + override def setMaxFieldSize(max: Int): Unit = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "setMaxFieldSize not supported" + ) + + override def getMaxRows: Int = maxRows + + override def setMaxRows(max: Int): Unit = this.maxRows = max + + override def setEscapeProcessing(enable: Boolean): Unit = if (enable) { + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "setEscapeProcessing not supported" + ) + } + + override def getQueryTimeout: Int = queryTimeout + + override def setQueryTimeout(seconds: Int): Unit = throwWhenClosed(queryTimeout = seconds * 1000) + + override def cancel(): Unit = throwWhenClosed(clearQuery()) + + override def getWarnings: SQLWarning = null + + override def clearWarnings(): Unit = {} + + override def setCursorName(name: String): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "setCursorName not supported" + ) + + override def execute(sql: String): Boolean = throwWhenClosed { + var parsedSQL = sql + JDBCDriverPreExecutionHook.getPreExecutionHooks.foreach { 
preExecution => + parsedSQL = preExecution.callPreExecutionHook(parsedSQL, !ujesSQLConnection.isTableau()) + } + logger.info(s"begin to execute sql ${parsedSQL}") + queryEnd = false + logPath = null + try { + jobExecuteResult = ujesSQLConnection.toSubmit(parsedSQL) + val atMost = + if (queryTimeout > 0) Duration(queryTimeout, TimeUnit.MILLISECONDS) else Duration.Inf + var jobInfo = ujesSQLConnection.ujesClient.getJobInfo(jobExecuteResult) + logPath = jobInfo.getRequestPersistTask.getLogPath + if (!ExecutionNodeStatus.isCompleted(ExecutionNodeStatus.valueOf(jobInfo.getJobStatus))) { + Utils.tryThrow { + Utils.waitUntil( + () => { + jobInfo = ujesSQLConnection.ujesClient.getJobInfo(jobExecuteResult) + ExecutionNodeStatus.isCompleted( + ExecutionNodeStatus.valueOf(jobInfo.getJobStatus) + ) || closed + }, + atMost, + 100, + 10000 + ) + } { + case t: TimeoutException => + if (queryTimeout > 0) clearQuery() + logPath = jobInfo.getRequestPersistTask.getLogPath + new LinkisSQLException(LinkisSQLErrorCode.QUERY_TIMEOUT, "query has been timeout!") + .initCause(t) + case t => t + } + } + logPath = jobInfo.getRequestPersistTask.getLogPath + if (!ExecutionNodeStatus.isSucceed(ExecutionNodeStatus.valueOf(jobInfo.getJobStatus))) { + throw new LinkisSQLException( + jobInfo.getRequestPersistTask.getErrDesc, + jobInfo.getRequestPersistTask.getErrCode.toString + ) + } + + logger.info(s"end to execute sql ${parsedSQL}") + val resultSetList = jobInfo.getResultSetList(ujesSQLConnection.ujesClient) + logger.info(s"resultSetList is ${resultSetList.mkString(",")}") + if (resultSetList != null && resultSetList.nonEmpty) { + resultSet = new UJESSQLResultSet(resultSetList, this, maxRows, fetchSize) + true + } else { + false + } + } catch { + case sqlException: SQLException => + throw sqlException + case throwable: Throwable => + val exception = + new LinkisSQLException(LinkisSQLErrorCode.UNKNOWN_ERROR, throwable.getMessage) + exception.initCause(throwable) + throw exception + } finally { + 
queryEnd = true + } + } + + def getJobExcuteResult: JobExecuteResult = jobExecuteResult + + override def getResultSet: UJESSQLResultSet = resultSet + + override def getUpdateCount: Int = throwWhenClosed(-1) + + override def getMoreResults: Boolean = getMoreResults(Statement.CLOSE_CURRENT_RESULT) + + override def setFetchDirection(direction: Int): Unit = + throwWhenClosed(if (direction != ResultSet.FETCH_FORWARD) { + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "only FETCH_FORWARD is supported." + ) + }) + + override def getFetchDirection: Int = throwWhenClosed(ResultSet.FETCH_FORWARD) + + override def setFetchSize(rows: Int): Unit = this.fetchSize = rows + + override def getFetchSize: Int = fetchSize + + override def getResultSetConcurrency: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "getResultSetConcurrency not supported." + ) + + override def getResultSetType: Int = throwWhenClosed(ResultSet.TYPE_FORWARD_ONLY) + + override def addBatch(sql: String): Unit = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "addBatch not supported.") + + override def clearBatch(): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "clearBatch not supported." + ) + + override def executeBatch(): Array[Int] = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "executeBatch not supported." 
+ ) + + override def getConnection: Connection = throwWhenClosed(ujesSQLConnection) + + override def getMoreResults(current: Int): Boolean = { + if (this.resultSet == null) { + false + } else { + this.resultSet.getMetaData + val nextResultSet = this.resultSet.getNextResultSet + current match { + case Statement.CLOSE_CURRENT_RESULT => + // 1 - CLOSE CURRENT RESULT SET + this.resultSet.close() + this.resultSet.clearNextResultSet + case Statement.KEEP_CURRENT_RESULT => + // 2 - KEEP CURRENT RESULT SET + this.openedResultSets.add(this.resultSet) + this.resultSet.clearNextResultSet + case Statement.CLOSE_ALL_RESULTS => + // 3 - CLOSE ALL RESULT SET + this.openedResultSets.add(this.resultSet) + closeAllOpenedResultSet() + case _ => + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "getMoreResults with current not in 1,2,3 is not supported, see Statement.getMoreResults" + ) + } + this.resultSet = nextResultSet + this.resultSet != null + } + } + + private def closeAllOpenedResultSet(): Any = { + val iterator = this.openedResultSets.iterator() + while (iterator.hasNext) { + val set = iterator.next() + if (!set.isClosed) { + set.close() + } + } + } + + override def getGeneratedKeys: ResultSet = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "getGeneratedKeys not supported." + ) + + override def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "executeUpdate with autoGeneratedKeys not supported." + ) + + override def executeUpdate(sql: String, columnIndexes: Array[Int]): Int = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "executeUpdate with columnIndexes not supported." + ) + + override def executeUpdate(sql: String, columnNames: Array[String]): Int = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "executeUpdate with columnNames not supported." 
+ ) + + override def execute(sql: String, autoGeneratedKeys: Int): Boolean = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "execute with autoGeneratedKeys not supported." + ) + + override def execute(sql: String, columnIndexes: Array[Int]): Boolean = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "execute with columnIndexes not supported." + ) + + override def execute(sql: String, columnNames: Array[String]): Boolean = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "execute with columnNames not supported." + ) + + override def getResultSetHoldability: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "getResultSetHoldability not supported" + ) + + override def isClosed: Boolean = closed + + override def setPoolable(poolable: Boolean): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "setPoolable not supported" + ) + + override def isPoolable: Boolean = false + + override def closeOnCompletion(): Unit = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "closeOnCompletion not supported" + ) + + override def isCloseOnCompletion: Boolean = false + + override def unwrap[T](iface: Class[T]): T = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "unwrap not supported") + + override def isWrapperFor(iface: Class[_]): Boolean = false + + /** + * log[0] error log[1] warn log[2] info log[3] all (info + warn + error) + * + * @return + */ + def getAllLog(): Array[String] = { + if (queryEnd && StringUtils.isNotBlank(logPath)) { + val openLogAction = + OpenLogAction.newBuilder().setLogPath(logPath).setProxyUser(ujesSQLConnection.user).build() + ujesSQLConnection.ujesClient.openLog(openLogAction).getLog + } else { + Array.empty[String] + } + } + + /** + * log[0] error log[1] warn log[2] info log[3] all (info + warn + error) + * + * @return + */ + def getIncrementalLog(): 
util.List[String] = { + if (null != jobExecuteResult && !queryEnd) { + val logObj = ujesSQLConnection.ujesClient.log(jobExecuteResult, logFromLen, logSize) + logFromLen = logObj.fromLine + logObj.getLog + } else { + new util.ArrayList[String] + } + } + +} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala index 60f4d88af54..96132d56415 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala @@ -23,8 +23,10 @@ import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder import org.apache.linkis.ujes.client.UJESClient import org.apache.linkis.ujes.jdbc.UJESSQLDriverMain._ +import org.apache.commons.codec.binary.Hex import org.apache.commons.lang3.StringUtils +import java.nio.charset.StandardCharsets import java.util import java.util.Properties @@ -35,18 +37,51 @@ object UJESClientFactory extends Logging { def getUJESClient(props: Properties): UJESClient = { val host = props.getProperty(HOST) val port = props.getProperty(PORT) - val serverUrl = if (StringUtils.isNotBlank(port)) s"http://$host:$port" else "http://" + host - if (ujesClients.containsKey(serverUrl)) ujesClients.get(serverUrl) - else - serverUrl.intern synchronized { - if (ujesClients.containsKey(serverUrl)) return ujesClients.get(serverUrl) - val ujesClient = createUJESClient(serverUrl, props) - ujesClients.put(serverUrl, ujesClient) + val user = props.getProperty(USER) + val pwd = props.getProperty(PASSWORD) + val sslEnabled = + if ( + props + .containsKey(USE_SSL) && "true".equalsIgnoreCase(props.getProperty(USE_SSL)) + ) { + true + } else { + false + } + val prefix = if (sslEnabled) { + "https" + } else { + 
"http" + } + val serverUrl = + if (StringUtils.isNotBlank(port)) s"$prefix://$host:$port" else "$prefix://" + host + val uniqueKey = s"${serverUrl}_${user}_${pwd}" + val uniqueKeyDes = Hex.encodeHexString(uniqueKey.getBytes(StandardCharsets.UTF_8)) + if (ujesClients.containsKey(uniqueKeyDes)) { + logger.info("Clients with the same JDBC unique key({}) will get it directly", serverUrl) + ujesClients.get(uniqueKeyDes) + } else { + uniqueKeyDes.intern synchronized { + if (ujesClients.containsKey(uniqueKeyDes)) { + logger.info("Clients with the same JDBC unique key({}) will get it directly", serverUrl) + return ujesClients.get(uniqueKeyDes) + } + logger.info( + "The same Client does not exist for the JDBC unique key({}), a new Client will be created", + serverUrl + ) + val ujesClient = createUJESClient(serverUrl, props, sslEnabled) + ujesClients.put(uniqueKeyDes, ujesClient) ujesClient } + } } - private def createUJESClient(serverUrl: String, props: Properties): UJESClient = { + private def createUJESClient( + serverUrl: String, + props: Properties, + sslEnabled: Boolean + ): UJESClient = { val clientConfigBuilder = DWSClientConfigBuilder.newBuilder() clientConfigBuilder.addServerUrl(serverUrl) clientConfigBuilder.setAuthTokenKey(props.getProperty(USER)) @@ -54,7 +89,6 @@ object UJESClientFactory extends Logging { clientConfigBuilder.setAuthenticationStrategy(new StaticAuthenticationStrategy()) clientConfigBuilder.readTimeout(100000) clientConfigBuilder.maxConnectionSize(20) - clientConfigBuilder.readTimeout(10000) val params = props.getProperty(PARAMS) var versioned = false if (StringUtils.isNotBlank(params)) { @@ -78,6 +112,10 @@ object UJESClientFactory extends Logging { } } if (!versioned) clientConfigBuilder.setDWSVersion("v" + DEFAULT_VERSION) + + if (sslEnabled) { + clientConfigBuilder.setSSL(sslEnabled) + } UJESClient(clientConfigBuilder.build()) } diff --git 
a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLConnection.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLConnection.scala deleted file mode 100644 index 0d8403c274b..00000000000 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLConnection.scala +++ /dev/null @@ -1,340 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ujes.jdbc - -import org.apache.linkis.common.utils.{Logging, Utils} -import org.apache.linkis.ujes.client.UJESClient -import org.apache.linkis.ujes.client.request.JobExecuteAction.EngineType -import org.apache.linkis.ujes.jdbc.UJESSQLDriverMain._ - -import org.apache.commons.lang3.StringUtils - -import java.{sql, util} -import java.sql.{ - Blob, - CallableStatement, - Clob, - Connection, - DatabaseMetaData, - NClob, - PreparedStatement, - ResultSet, - Savepoint, - SQLException, - SQLWarning, - SQLXML, - Statement, - Struct -} -import java.util.Properties -import java.util.concurrent.Executor - -import scala.collection.{mutable, JavaConversions} - -class UJESSQLConnection(private[jdbc] val ujesClient: UJESClient, props: Properties) - extends Connection - with Logging { - private[jdbc] var creator = "IDE" - - private[jdbc] val variableMap = { - val params = props.getProperty(PARAMS) - val map = new mutable.HashMap[String, AnyRef] - if (params != null) { - params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { - case Array(k, v) if k.startsWith(VARIABLE_HEADER) => - map += k.substring(VARIABLE_HEADER.length) -> v - case Array(CREATOR, v) => - creator = v - case _ => - } - } - map.toMap - } - - private[jdbc] val dbName = - if (StringUtils.isNotBlank(props.getProperty(DB_NAME))) props.getProperty(DB_NAME) - else "default" - - private val runningSQLStatements = new util.LinkedList[Statement] - - private var closed = false - - private var inited = false - - private[jdbc] val user = props.getProperty(USER) - - private[jdbc] val serverURL = props.getProperty("URL") - - private val engineTypeMap: mutable.HashMap[String, EngineType] = new mutable.HashMap() - - private[jdbc] def getEngineType: EngineType = { - if (engineTypeMap.isEmpty) { - engineTypeMap.put(EngineType.SPARK.toString, EngineType.SPARK) - engineTypeMap.put(EngineType.HIVE.toString, EngineType.HIVE) - engineTypeMap.put(EngineType.JDBC.toString, EngineType.JDBC) - 
engineTypeMap.put(EngineType.PYTHON.toString, EngineType.PYTHON) - engineTypeMap.put(EngineType.SHELL.toString, EngineType.SHELL) - engineTypeMap.put(EngineType.PRESTO.toString, EngineType.PRESTO) - } - val engineType: EngineType = EngineType.PRESTO - if (props.containsKey(PARAMS)) { - val params = props.getProperty(PARAMS) - if (params != null & params.length() > 0) { - params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { - case Array(k, v) if k.equals(UJESSQLDriver.ENGINE_TYPE) => return engineTypeMap(v) - case _ => - } - } - } - engineType - } - - private[jdbc] def throwWhenClosed[T](op: => T): T = - if (isClosed) throw new UJESSQLException(UJESSQLErrorCode.CONNECTION_CLOSED) - else op - - private def createStatementAndAdd[T <: Statement](op: => T): T = throwWhenClosed { - - val statement = op - runningSQLStatements.add(statement) - if (!inited) { - inited = true - Utils.tryAndWarn(statement.execute(s"USE $dbName")) - } - statement - } - - def getProps: Properties = props - - def removeStatement(statement: UJESSQLStatement): Unit = runningSQLStatements.remove(statement) - - override def createStatement(): Statement = createStatementAndAdd(new UJESSQLStatement(this)) - - override def prepareStatement(sql: String): UJESSQLPreparedStatement = { - val statement = createStatementAndAdd(new UJESSQLPreparedStatement(this, sql)) - statement.clearQuery() - statement - } - - override def createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement = { - if (resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) - throw new SQLException( - "Statement with resultset concurrency " + resultSetConcurrency + " is not supported", - "HYC00" - ) - if (resultSetType == ResultSet.TYPE_SCROLL_SENSITIVE) - throw new SQLException( - "Statement with resultset type " + resultSetType + " is not supported", - "HYC00" - ) - createStatementAndAdd(new UJESSQLStatement(this)) - } - - override def prepareStatement(sql: String, autoGeneratedKeys: Int): PreparedStatement = - 
prepareStatement(sql) - - override def prepareStatement( - sql: String, - resultSetType: Int, - resultSetConcurrency: Int - ): PreparedStatement = prepareStatement(sql) - - override def getMetaData: DatabaseMetaData = throwWhenClosed(new UJESSQLDatabaseMetaData(this)) - - override def close(): Unit = { - JavaConversions - .asScalaBuffer(runningSQLStatements) - .foreach(statement => Utils.tryQuietly(statement.close())) - closed = true - } - - override def isClosed: Boolean = closed - - override def setReadOnly(readOnly: Boolean): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "setReadOnly not supported") - - override def isReadOnly: Boolean = false - - override def setCatalog(catalog: String): Unit = throwWhenClosed() - - override def getCatalog: String = "" - - override def setTransactionIsolation(level: Int): Unit = {} - - override def getTransactionIsolation: Int = Connection.TRANSACTION_NONE - - override def getWarnings: SQLWarning = null - - override def clearWarnings(): Unit = {} - - override def setAutoCommit(autoCommit: Boolean): Unit = {} - - override def getAutoCommit: Boolean = true - - override def commit(): Unit = {} - - override def prepareCall(sql: String): CallableStatement = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "prepareCall not supported") - - override def rollback(): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "rollback not supported") - - override def nativeSQL(sql: String): String = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "nativeSQL not supported") - - override def prepareCall( - sql: String, - resultSetType: Int, - resultSetConcurrency: Int - ): CallableStatement = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "prepareCall not supported") - - override def getTypeMap: util.Map[String, Class[_]] = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "getTypeMap not supported") - - override def 
setTypeMap(map: util.Map[String, Class[_]]): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "setTypeMap not supported") - - override def setHoldability(holdability: Int): Unit = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "setHoldability not supported" - ) - - override def getHoldability: Int = 0 - - override def setSavepoint(): Savepoint = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "setSavepoint not supported") - - override def setSavepoint(name: String): Savepoint = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "setSavepoint not supported") - - override def rollback(savepoint: Savepoint): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "rollback not supported") - - override def releaseSavepoint(savepoint: Savepoint): Unit = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "releaseSavepoint not supported" - ) - - override def createStatement( - resultSetType: Int, - resultSetConcurrency: Int, - resultSetHoldability: Int - ): Statement = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "createStatement not supported" - ) - - override def prepareStatement( - sql: String, - resultSetType: Int, - resultSetConcurrency: Int, - resultSetHoldability: Int - ): PreparedStatement = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "prepareStatement not supported" - ) - - override def prepareCall( - sql: String, - resultSetType: Int, - resultSetConcurrency: Int, - resultSetHoldability: Int - ): CallableStatement = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "prepareCall not supported") - - override def prepareStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "prepareStatement not supported" - ) - - override def prepareStatement(sql: String, columnNames: 
Array[String]): PreparedStatement = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "prepareStatement not supported" - ) - - override def createClob(): Clob = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createClob not supported") - - override def createBlob(): Blob = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createBlob not supported") - - override def createNClob(): NClob = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createNClob not supported") - - override def createSQLXML(): SQLXML = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createSQLXML not supported") - - override def isValid(timeout: Int): Boolean = true - - override def setClientInfo(name: String, value: String): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "setClientInfo not supported") - - override def setClientInfo(properties: Properties): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "properties not supported") - - override def getClientInfo(name: String): String = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "getClientInfo not supported") - - override def getClientInfo: Properties = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "getClientInfo not supported") - - override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createArrayOf not supported") - - override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createStruct not supported") - - override def setSchema(schema: String): Unit = throwWhenClosed { - if (StringUtils.isBlank(schema)) - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "schema is empty!") - createStatement().execute("use " + schema) - } - - override def 
getSchema: String = throwWhenClosed { - val resultSet = createStatement().executeQuery("SELECT current_database()") - if (!resultSet.next()) - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "Get schema failed!") - resultSet.getString(1) - } - - override def abort(executor: Executor): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "abort not supported") - - override def setNetworkTimeout(executor: Executor, milliseconds: Int): Unit = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "setNetworkTimeout not supported" - ) - - override def getNetworkTimeout: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "getNetworkTimeout not supported" - ) - - override def unwrap[T](iface: Class[T]): T = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "unwrap not supported") - - override def isWrapperFor(iface: Class[_]): Boolean = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "isWrapperFor not supported") - -} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaData.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaData.scala index f2f0b9a106c..30b980f1ce0 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaData.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaData.scala @@ -29,7 +29,7 @@ import java.util import scala.collection.JavaConversions._ -class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) +class UJESSQLDatabaseMetaData(ujesSQLConnection: LinkisSQLConnection) extends DatabaseMetaData with Logging { override def allProceduresAreCallable(): Boolean = false @@ -41,7 +41,8 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) 
override def getUserName: String = if (ujesSQLConnection.getProps.containsKey("user")) ujesSQLConnection.getProps.getProperty("user") - else throw new UJESSQLException(UJESSQLErrorCode.PARAMS_NOT_FOUND, "Missing user information") + else + throw new LinkisSQLException(LinkisSQLErrorCode.PARAMS_NOT_FOUND, "Missing user information") override def isReadOnly: Boolean = false @@ -88,7 +89,10 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def getIdentifierQuoteString: String = " " override def getSQLKeywords: String = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "getSQLKeywords not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "getSQLKeywords not supported" + ) override def getNumericFunctions: String = "" @@ -162,8 +166,8 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def getCatalogTerm: String = "instance" - override def isCatalogAtStart: Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def isCatalogAtStart: Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "isCatalogAtStart not supported" ) @@ -219,103 +223,106 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def supportsOpenStatementsAcrossRollback(): Boolean = false - override def getMaxBinaryLiteralLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxBinaryLiteralLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxBinaryLiteralLength not supported" ) - override def getMaxCharLiteralLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxCharLiteralLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxCharLiteralLength not supported" ) override def getMaxColumnNameLength: Int = 128 - override def getMaxColumnsInGroupBy: 
Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxColumnsInGroupBy: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxColumnsInGroupBy not supported" ) - override def getMaxColumnsInIndex: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxColumnsInIndex: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxColumnsInIndex not supported" ) - override def getMaxColumnsInOrderBy: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxColumnsInOrderBy: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxColumnsInOrderBy not supported" ) - override def getMaxColumnsInSelect: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxColumnsInSelect: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxColumnsInSelect not supported" ) - override def getMaxColumnsInTable: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxColumnsInTable: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxColumnsInTable not supported" ) - override def getMaxConnections: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxConnections: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxConnections not supported" ) - override def getMaxCursorNameLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxCursorNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxCursorNameLength not supported" ) - override def getMaxIndexLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxIndexLength: Int = throw new LinkisSQLException( + 
LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxIndexLength not supported" ) - override def getMaxSchemaNameLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxSchemaNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxSchemaNameLength not supported" ) - override def getMaxProcedureNameLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxProcedureNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxProcedureNameLength not supported" ) - override def getMaxCatalogNameLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxCatalogNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxCatalogNameLength not supported" ) override def getMaxRowSize: Int = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "getMaxRowSize not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "getMaxRowSize not supported" + ) - override def doesMaxRowSizeIncludeBlobs(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def doesMaxRowSizeIncludeBlobs(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "doesMaxRowSizeIncludeBlobs not supported" ) - override def getMaxStatementLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxStatementLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxStatementLength not supported" ) - override def getMaxStatements: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxStatements: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxStatements not supported" ) - override def getMaxTableNameLength: Int = throw new 
UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxTableNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxTableNameLength not supported" ) - override def getMaxTablesInSelect: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxTablesInSelect: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxTablesInSelect not supported" ) - override def getMaxUserNameLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxUserNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxUserNameLength not supported" ) @@ -329,13 +336,13 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def supportsDataManipulationTransactionsOnly(): Boolean = false - override def dataDefinitionCausesTransactionCommit(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def dataDefinitionCausesTransactionCommit(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "dataDefinitionCausesTransactionCommit not supported" ) - override def dataDefinitionIgnoredInTransactions(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def dataDefinitionIgnoredInTransactions(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "dataDefinitionIgnoredInTransactions not supported" ) @@ -358,7 +365,9 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) tableNamePattern: String, types: Array[String] ): ResultSet = { - val resultCatalog = if (StringUtils.isNotBlank(catalog)) { + val resultCatalog = if (StringUtils.isNotBlank(schemaPattern)) { + schemaPattern + } else if (StringUtils.isNotBlank(catalog)) { catalog } else { s"${getUserName}_ind" @@ -374,11 +383,18 @@ class 
UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) if (table.get("isView").asInstanceOf[Boolean]) TableType.VIEW.name() else TableType.TABLE.name() val resultTable = new util.HashMap[String, String]() + val tableName = table.get("tableName").asInstanceOf[String] resultTable.put("catalog", resultCatalog) - resultTable.put("tableName", table.get("tableName").asInstanceOf[String]) + resultTable.put("tableName", tableName) resultTable.put("tableType", tableType) if (null == types || types.contains(tableType)) { - resultTables.add(resultTable) + if ( + StringUtils.isNotBlank(tableNamePattern) && tableNamePattern.equalsIgnoreCase(tableName) + ) { + resultTables.add(resultTable) + } else if (StringUtils.isBlank(tableNamePattern)) { + resultTables.add(resultTable) + } } } val resultSet: LinkisMetaDataResultSet[util.Map[String, String]] = @@ -471,7 +487,9 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) tableNamePattern: String, columnNamePattern: String ): ResultSet = { - val resultCatalog = if (StringUtils.isNotBlank(catalog)) { + val resultCatalog = if (StringUtils.isNotBlank(schemaPattern)) { + schemaPattern + } else if (StringUtils.isNotBlank(catalog)) { catalog } else { s"${getUserName}_ind" @@ -670,48 +688,48 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def supportsResultSetConcurrency(`type`: Int, concurrency: Int): Boolean = false - override def ownUpdatesAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def ownUpdatesAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "ownUpdatesAreVisible not supported" ) - override def ownDeletesAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def ownDeletesAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "ownDeletesAreVisible 
not supported" ) - override def ownInsertsAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def ownInsertsAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "ownInsertsAreVisible not supported" ) - override def othersUpdatesAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def othersUpdatesAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "othersUpdatesAreVisible not supported" ) - override def othersDeletesAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def othersDeletesAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "othersDeletesAreVisible not supported" ) - override def othersInsertsAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def othersInsertsAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "othersInsertsAreVisible not supported" ) - override def updatesAreDetected(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def updatesAreDetected(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "updatesAreDetected not supported" ) - override def deletesAreDetected(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def deletesAreDetected(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "deletesAreDetected not supported" ) - override def insertsAreDetected(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def insertsAreDetected(`type`: Int): Boolean = 
throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "insertsAreDetected not supported" ) @@ -739,14 +757,20 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) schemaPattern: String, typeNamePattern: String ): ResultSet = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "getSuperTypes not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "getSuperTypes not supported" + ) override def getSuperTables( catalog: String, schemaPattern: String, tableNamePattern: String ): ResultSet = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "getSuperTables not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "getSuperTables not supported" + ) override def getAttributes( catalog: String, @@ -754,12 +778,15 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) typeNamePattern: String, attributeNamePattern: String ): ResultSet = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "getAttributes not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "getAttributes not supported" + ) override def supportsResultSetHoldability(holdability: Int): Boolean = false - override def getResultSetHoldability: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getResultSetHoldability: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getResultSetHoldability not supported" ) @@ -773,15 +800,15 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def getSQLStateType: Int = 2 - override def locatorsUpdateCopy(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def locatorsUpdateCopy(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "locatorsUpdateCopy not supported" ) override def supportsStatementPooling(): Boolean = false - 
override def getRowIdLifetime: RowIdLifetime = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getRowIdLifetime: RowIdLifetime = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getRowIdLifetime not supported" ) @@ -794,13 +821,13 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def supportsStoredFunctionsUsingCallSyntax(): Boolean = false - override def autoCommitFailureClosesAllResultSets(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def autoCommitFailureClosesAllResultSets(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "autoCommitFailureClosesAllResultSets not supported" ) - override def getClientInfoProperties: ResultSet = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getClientInfoProperties: ResultSet = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getClientInfoProperties not supported" ) @@ -824,15 +851,18 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) columnNamePattern: String ): ResultSet = null - override def generatedKeyAlwaysReturned(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def generatedKeyAlwaysReturned(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "generatedKeyAlwaysReturned not supported" ) override def unwrap[T](iface: Class[T]): T = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "unwrap not supported") + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA, "unwrap not supported") override def isWrapperFor(iface: Class[_]): Boolean = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "isWrapperFor not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "isWrapperFor not supported" + ) } diff --git 
a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala index 783713cf404..44686981e80 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala @@ -29,10 +29,10 @@ import java.sql.{ DriverPropertyInfo, SQLFeatureNotSupportedException } -import java.util.Properties +import java.util.{Locale, Properties} import java.util.logging.Logger -import scala.collection.JavaConversions +import scala.collection.JavaConverters._ class UJESSQLDriverMain extends Driver with Logging { @@ -41,8 +41,10 @@ class UJESSQLDriverMain extends Driver with Logging { props.putAll(parseURL(url)) logger.info(s"input url:$url, properties:$properties") val ujesClient = UJESClientFactory.getUJESClient(props) - new UJESSQLConnection(ujesClient, props) - } else throw new UJESSQLException(UJESSQLErrorCode.BAD_URL, "bad url: " + url) + new LinkisSQLConnection(ujesClient, props) + } else { + null + } override def acceptsURL(url: String): Boolean = url.startsWith(URL_PREFIX) @@ -70,20 +72,28 @@ class UJESSQLDriverMain extends Driver with Logging { case Array(TOKEN_VALUE, value) => props.setProperty(TOKEN_VALUE, value) false - case Array(LIMIT, value) => - props.setProperty(LIMIT, value) - UJESSQLDriverMain.LIMIT_ENABLED = value.toLowerCase() + case Array(FIXED_SESSION, value) => + props.setProperty(FIXED_SESSION, value) + false + case Array(USE_SSL, value) => + props.setProperty(USE_SSL, value) + false + case Array(ENABLE_MULTI_RESULT, value) => + props.setProperty(ENABLE_MULTI_RESULT, value) false case Array(key, _) => if (StringUtils.isBlank(key)) { - throw new UJESSQLException(UJESSQLErrorCode.BAD_URL, "bad url for params: " + url) + 
throw new LinkisSQLException( + LinkisSQLErrorCode.BAD_URL, + "bad url for params: " + url + ) } else true case _ => - throw new UJESSQLException(UJESSQLErrorCode.BAD_URL, "bad url for params: " + url) + throw new LinkisSQLException(LinkisSQLErrorCode.BAD_URL, "bad url for params: " + url) } props.setProperty(PARAMS, kvs.map(_.mkString(KV_SPLIT)).mkString(PARAM_SPLIT)) } - case _ => throw new UJESSQLException(UJESSQLErrorCode.BAD_URL, "bad url: " + url) + case _ => throw new LinkisSQLException(LinkisSQLErrorCode.BAD_URL, "bad url: " + url) } props } @@ -133,8 +143,10 @@ object UJESSQLDriverMain { val TOKEN_VALUE = UJESSQLDriver.TOKEN_VALUE val PASSWORD = UJESSQLDriver.PASSWORD val TABLEAU_SERVER = UJESSQLDriver.TABLEAU_SERVER - val LIMIT = UJESSQLDriver.LIMIT - var LIMIT_ENABLED = UJESSQLDriver.LIMIT_ENABLED + val FIXED_SESSION = UJESSQLDriver.FIXED_SESSION + val ENABLE_MULTI_RESULT = UJESSQLDriver.ENABLE_MULTI_RESULT + + val USE_SSL = UJESSQLDriver.USE_SSL val VERSION = UJESSQLDriver.VERSION val DEFAULT_VERSION = UJESSQLDriver.DEFAULT_VERSION @@ -144,14 +156,15 @@ object UJESSQLDriverMain { val ENABLE_LOADBALANCER = UJESSQLDriver.ENABLE_LOADBALANCER val CREATOR = UJESSQLDriver.CREATOR + val TABLEAU = UJESSQLDriver.TABLEAU + val VARIABLE_HEADER = UJESSQLDriver.VARIABLE_HEADER def getConnectionParams( connectionParams: String, variableMap: java.util.Map[String, Any] ): String = { - val variables = JavaConversions - .mapAsScalaMap(variableMap) + val variables = variableMap.asScala .map(kv => VARIABLE_HEADER + kv._1 + KV_SPLIT + kv._2) .mkString(PARAM_SPLIT) if (StringUtils.isNotBlank(connectionParams)) connectionParams + PARAM_SPLIT + variables @@ -179,17 +192,20 @@ object UJESSQLDriverMain { ): String = { val sb = new StringBuilder if (StringUtils.isNotBlank(version)) sb.append(VERSION).append(KV_SPLIT).append(version) - if (maxConnectionSize > 0) + if (maxConnectionSize > 0) { 
sb.append(PARAM_SPLIT).append(MAX_CONNECTION_SIZE).append(KV_SPLIT).append(maxConnectionSize) - if (readTimeout > 0) + } + if (readTimeout > 0) { sb.append(PARAM_SPLIT).append(READ_TIMEOUT).append(KV_SPLIT).append(readTimeout) + } if (enableDiscovery) { sb.append(PARAM_SPLIT).append(ENABLE_DISCOVERY).append(KV_SPLIT).append(enableDiscovery) - if (enableLoadBalancer) + if (enableLoadBalancer) { sb.append(PARAM_SPLIT) .append(ENABLE_LOADBALANCER) .append(KV_SPLIT) .append(enableLoadBalancer) + } } if (sb.startsWith(PARAM_SPLIT)) sb.toString.substring(PARAM_SPLIT.length) else sb.toString } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLException.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLException.scala deleted file mode 100644 index 25db6f9381a..00000000000 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLException.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ujes.jdbc - -import org.apache.linkis.common.exception.ErrorException - -class UJESSQLException(errorCode: UJESSQLErrorCode) - extends ErrorException(errorCode.getCode, errorCode.getMsg) { - - def this(errorCode: UJESSQLErrorCode, msg: String) { - this(errorCode) - setErrCode(errorCode.getCode) - setDesc(msg) - } - - /** - * add to deal with errorinfo derived from jobInfo - * @param errorCode - * @param msg - */ - def this(errorCode: Int, msg: String) { - this(UJESSQLErrorCode.ERRORINFO_FROM_JOBINFO) - setDesc(msg) - setErrCode(errorCode) - } - -} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatement.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatement.scala deleted file mode 100644 index 6328da99e20..00000000000 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatement.scala +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ujes.jdbc - -import java.io.{InputStream, Reader} -import java.net.URL -import java.sql.{ - Blob, - Clob, - Date, - NClob, - ParameterMetaData, - PreparedStatement, - Ref, - ResultSetMetaData, - RowId, - SQLXML, - Time, - Timestamp -} -import java.util -import java.util.Calendar - -class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String) - extends UJESSQLStatement(ujesSQLConnection) - with PreparedStatement { - - private val parameters = new util.HashMap[Int, Any] - - private var parameterMetaData: ParameterMetaData = _ - - private var batchTimes = 0 - - private def updateSql(sql: String, parameters: util.HashMap[Int, Any]): String = { - if (!sql.contains("?")) { - sql - } else { - val newSql = new StringBuilder(sql) - for (paramLoc <- 1 to parameters.size()) { - if (parameters.containsKey(paramLoc)) { - val charIndex = getCharIndexFromSqlByParamLocation(newSql.toString(), '?', 1) - newSql.deleteCharAt(charIndex) - newSql.insert(charIndex, parameters.get(paramLoc).asInstanceOf[String]) - } - } - newSql.toString() - } - } - - private def getCharIndexFromSqlByParamLocation(sql: String, cchar: Char, paramLoc: Int): Int = { - var signalCount = 0 - var charIndex = -1 - var num = 0 - for (i <- 0 to sql.length - 1 if charIndex == -1) { - val c = sql.charAt(i) - if (c != '\'' && c != '\\') { - if (c == cchar && signalCount % 2 == 0) { - num += 1 - if (num == paramLoc) { - charIndex = i - } - } - } else { - signalCount += signalCount - } - } - charIndex - } - - override def executeQuery(): UJESSQLResultSet = { - super.executeQuery(updateSql(sql, parameters)) - } - - override def executeUpdate(): Int = { - super.executeUpdate(updateSql(sql, parameters)) - } - - override def setNull(parameterIndex: Int, sqlType: Int): Unit = { - parameters.put(parameterIndex, "NULL") - } - - override def setBoolean(parameterIndex: Int, x: Boolean): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def 
setByte(parameterIndex: Int, x: Byte): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setShort(parameterIndex: Int, x: Short): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setInt(parameterIndex: Int, x: Int): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setLong(parameterIndex: Int, x: Long): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setFloat(parameterIndex: Int, x: Float): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setDouble(parameterIndex: Int, x: Double): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setBigDecimal(parameterIndex: Int, x: java.math.BigDecimal): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setString(parameterIndex: Int, x: String): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setBytes(parameterIndex: Int, x: Array[Byte]): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setDate(parameterIndex: Int, x: Date): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setTime(parameterIndex: Int, x: Time): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setTimestamp(parameterIndex: Int, x: Timestamp): Unit = { - parameters.put(parameterIndex, x + "") - } - - override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setUnicodeStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def clearParameters(): Unit = { - parameters.clear() - } - - override def setObject(parameterIndex: Int, x: 
scala.Any, targetSqlType: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setObject(parameterIndex: Int, x: scala.Any): Unit = { - x match { - case null => setNull(parameterIndex, 0) - case x: String => setString(parameterIndex, x) - case x: Short => setShort(parameterIndex, x) - case x: Int => setInt(parameterIndex, x) - case x: Long => setLong(parameterIndex, x) - case x: Float => setFloat(parameterIndex, x) - case x: Double => setDouble(parameterIndex, x) - case x: Boolean => setBoolean(parameterIndex, x) - case x: Byte => setByte(parameterIndex, x) - case x: Char => setString(parameterIndex, x.toString) - case x: Timestamp => setTimestamp(parameterIndex, x) - case _ => - throw new UJESSQLException( - UJESSQLErrorCode.PREPARESTATEMENT_TYPEERROR, - s"Can''t infer the SQL type to use for an instance of ${x.getClass.getName}. Use setObject() with an explicit Types value to specify the type to use" - ) - } - } - - override def execute(): Boolean = { - val res = super.execute(updateSql(sql, parameters)) - for (i <- 1 to batchTimes) { - super.execute(updateSql(sql, parameters)) - } - res - } - - override def executeBatch(): Array[Int] = { - - for (i <- 0 to batchTimes) { - super.execute(updateSql(sql, parameters)) - } - Array(1, 1) - } - - override def addBatch(): Unit = { - this.batchTimes = this.batchTimes + 1 - } - - override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setRef(parameterIndex: Int, x: Ref): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setBlob(parameterIndex: Int, x: Blob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setClob(parameterIndex: Int, x: Clob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def 
setArray(parameterIndex: Int, x: java.sql.Array): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def getMetaData: ResultSetMetaData = { - if (super.getResultSet == null) { - return null - } - super.getResultSet.getMetaData - } - - override def setDate(parameterIndex: Int, x: Date, cal: Calendar): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setTime(parameterIndex: Int, x: Time, cal: Calendar): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setTimestamp(parameterIndex: Int, x: Timestamp, cal: Calendar): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setNull(parameterIndex: Int, sqlType: Int, typeName: String): Unit = { - parameters.put(parameterIndex, "NULL") - } - - override def setURL(parameterIndex: Int, x: URL): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def getParameterMetaData: ParameterMetaData = { - synchronized { - if (null == this.parameterMetaData) { - this.parameterMetaData = new LinkisParameterMetaData(sql.count(_ == '?')) - } - } - this.parameterMetaData - } - - override def setRowId(parameterIndex: Int, x: RowId): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setNString(parameterIndex: Int, value: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setNCharacterStream(parameterIndex: Int, value: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setNClob(parameterIndex: Int, value: NClob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setClob(parameterIndex: Int, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override 
def setBlob(parameterIndex: Int, inputStream: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setNClob(parameterIndex: Int, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setSQLXML(parameterIndex: Int, xmlObject: SQLXML): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setObject( - parameterIndex: Int, - x: scala.Any, - targetSqlType: Int, - scaleOrLength: Int - ): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setAsciiStream(parameterIndex: Int, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setBinaryStream(parameterIndex: Int, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setCharacterStream(parameterIndex: Int, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setNCharacterStream(parameterIndex: Int, value: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setClob(parameterIndex: Int, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setBlob(parameterIndex: Int, inputStream: InputStream): Unit = { - throw new 
UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def setNClob(parameterIndex: Int, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) - } - - override def getResultSetType: Int = { - super.getResultSetType - } - -} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala index b8cf1b23b1b..be37a77f91c 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala @@ -20,6 +20,7 @@ package org.apache.linkis.ujes.jdbc import org.apache.linkis.common.utils.Logging import org.apache.linkis.ujes.client.request.ResultSetAction import org.apache.linkis.ujes.client.response.ResultSetResult +import org.apache.linkis.ujes.client.utils.UJESClientUtils import org.apache.commons.lang3.StringUtils @@ -42,7 +43,7 @@ import java.sql.{ Time, Timestamp } -import java.util.Calendar +import java.util.{Calendar, Locale} import org.joda.time.DateTimeZone import org.joda.time.format.{ @@ -54,7 +55,7 @@ import org.joda.time.format.{ class UJESSQLResultSet( resultSetList: Array[String], - ujesStatement: UJESSQLStatement, + ujesStatement: LinkisSQLStatement, maxRows: Int, fetchSize: Int ) extends ResultSet @@ -75,10 +76,11 @@ class UJESSQLResultSet( private val pageSize: Int = 5000 private var path: String = _ private var metaData: util.List[util.Map[String, String]] = _ - private val statement: UJESSQLStatement = ujesStatement + private val statement: LinkisSQLStatement = ujesStatement + private var nextResultSet: UJESSQLResultSet = _ - private val connection: UJESSQLConnection = - ujesStatement.getConnection.asInstanceOf[UJESSQLConnection] + private val 
connection: LinkisSQLConnection = + ujesStatement.getConnection.asInstanceOf[LinkisSQLConnection] private var valueWasNull: Boolean = false private var warningChain: SQLWarning = _ @@ -96,9 +98,21 @@ class UJESSQLResultSet( .toFormatter .withOffsetParsed + private val STRING_TYPE = "string" + + private val NULL_VALUE = "NULL" + private def getResultSetPath(resultSetList: Array[String]): String = { if (resultSetList.length > 0) { - resultSetList(resultSetList.length - 1) + val enableMultiResult = connection.getProps.getProperty(UJESSQLDriverMain.ENABLE_MULTI_RESULT) + enableMultiResult match { + case "Y" => + // 配置开启时,返回首个结果集 + resultSetList(0) + case _ => + // 配置关闭时,返回以最后一个结果集为准 + resultSetList(resultSetList.length - 1) + } } else { "" } @@ -106,6 +120,12 @@ class UJESSQLResultSet( private def resultSetResultInit(): Unit = { if (path == null) path = getResultSetPath(resultSetList) + // 设置下一个结果集 + val enableMultiResult = connection.getProps.getProperty(UJESSQLDriverMain.ENABLE_MULTI_RESULT) + if (resultSetList.length > 1 && "Y".equals(enableMultiResult)) { + this.nextResultSet = + new UJESSQLResultSet(resultSetList.drop(1), this.statement, maxRows, fetchSize) + } val user = connection.getProps.getProperty("user") if (StringUtils.isNotBlank(path)) { val resultAction = @@ -155,12 +175,25 @@ class UJESSQLResultSet( if (null == resultSetResult) { return } - metaData = resultSetResult.getMetadata.asInstanceOf[util.List[util.Map[String, String]]] - for (cursor <- 1 to metaData.size()) { - val col = metaData.get(cursor - 1) - resultSetMetaData.setColumnNameProperties(cursor, col.get("columnName")) - resultSetMetaData.setDataTypeProperties(cursor, col.get("dataType")) - resultSetMetaData.setCommentPropreties(cursor, col.get("comment")) + val metaTmp = resultSetResult.getMetadata + if (NULL_VALUE.equals(String.valueOf(metaTmp))) { + val fileContentList = + resultSetResult.getFileContent.asInstanceOf[util.List[util.List[String]]] + if (null != fileContentList) { + 
resultSetMetaData.setColumnNameProperties(1, "linkis_string") + resultSetMetaData.setDataTypeProperties(1, "String") + resultSetMetaData.setCommentPropreties(1, NULL_VALUE) + } + } else { + metaData = metaTmp.asInstanceOf[util.List[util.Map[String, String]]] + if (null != metaData) { + for (cursor <- 1 to metaData.size()) { + val col = metaData.get(cursor - 1) + resultSetMetaData.setColumnNameProperties(cursor, col.get("columnName")) + resultSetMetaData.setDataTypeProperties(cursor, col.get("dataType")) + resultSetMetaData.setCommentPropreties(cursor, col.get("comment")) + } + } } } @@ -189,7 +222,7 @@ class UJESSQLResultSet( if (metaData == null) init() currentRowCursor += 1 if (null == resultSetRow || currentRowCursor > resultSetRow.size() - 1) { - if (UJESSQLDriverMain.LIMIT_ENABLED.equals("false") && !isCompleted) { + if (!isCompleted) { updateResultSet() if (isCompleted) { return false @@ -229,48 +262,20 @@ class UJESSQLResultSet( } private def evaluate(dataType: String, value: String): Any = { - if (value == null || value.equals("null") || value.equals("NULL") || value.equals("Null")) { - value - } else { - dataType.toLowerCase match { - case null => throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) - case "string" => value.toString - case "short" => value.toShort - case "int" => value.toInt - case "long" => value.toLong - case "float" => value.toFloat - case "double" => value.toDouble - case "boolean" => value.toBoolean - case "byte" => value.toByte - case "char" => value.toString - case "timestamp" => value.toString - case "varchar" => value.toString - case "nvarchar" => value.toString - case "date" => value.toString - case "bigint" => value.toLong - case "decimal" => value.toDouble - case "array" => value.toArray - case "map" => value - case _ => - throw new UJESSQLException( - UJESSQLErrorCode.PREPARESTATEMENT_TYPEERROR, - s"Can't infer the SQL type to use for an instance of ${dataType}. 
Use getObject() with an explicit Types value to specify the type to use" - ) - } - } + UJESClientUtils.evaluate(dataType, value) } private def getColumnValue(columnIndex: Int): Any = { if (currentRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "No row found.") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "No row found.") } else if (currentRow.size() <= 0) { - throw new UJESSQLException( - UJESSQLErrorCode.RESULTSET_ROWERROR, + throw new LinkisSQLException( + LinkisSQLErrorCode.RESULTSET_ROWERROR, "RowSet does not contain any columns!" ) } else if (columnIndex > currentRow.size()) { - throw new UJESSQLException( - UJESSQLErrorCode.RESULTSET_ROWERROR, + throw new LinkisSQLException( + LinkisSQLErrorCode.RESULTSET_ROWERROR, s" Invalid columnIndex: ${columnIndex}" ) } else { @@ -285,7 +290,7 @@ class UJESSQLResultSet( override def getString(columnIndex: Int): String = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case c: Character => Character.toString(c) @@ -294,10 +299,14 @@ class UJESSQLResultSet( } } + def clearNextResultSet: Any = { + this.nextResultSet = null + } + override def getBoolean(columnIndex: Int): Boolean = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case s: String => @@ -313,7 +322,7 @@ class UJESSQLResultSet( override def getByte(columnIndex: Int): Byte = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { 
any.asInstanceOf[Byte] } @@ -322,7 +331,7 @@ class UJESSQLResultSet( override def getShort(columnIndex: Int): Short = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Short] } @@ -331,7 +340,7 @@ class UJESSQLResultSet( override def getInt(columnIndex: Int): Int = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case i: Integer => i.asInstanceOf[Int] @@ -344,7 +353,7 @@ class UJESSQLResultSet( override def getLong(columnIndex: Int): Long = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case i: Integer => i.longValue() @@ -357,7 +366,7 @@ class UJESSQLResultSet( override def getFloat(columnIndex: Int): Float = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Float] } @@ -366,7 +375,7 @@ class UJESSQLResultSet( override def getDouble(columnIndex: Int): Double = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case _: String => 0.0d @@ -379,7 +388,7 @@ class UJESSQLResultSet( val mc = new MathContext(scale) val any = getColumnValue(columnIndex) if (wasNull()) { - throw new 
UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case double: Double => new java.math.BigDecimal(double).round(mc) @@ -393,7 +402,7 @@ class UJESSQLResultSet( override def getBytes(columnIndex: Int): Array[Byte] = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Array[Byte]] } @@ -404,35 +413,35 @@ class UJESSQLResultSet( val any = getColumnValue(columnIndex) logger.info(s"the value of Date is $any") if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Date] } } override def getTime(columnIndex: Int): Time = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getTimestamp(columnIndex: Int): Timestamp = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Timestamp] } } override def getAsciiStream(columnIndex: Int): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getUnicodeStream(columnIndex: Int): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getBinaryStream(columnIndex: Int): InputStream = { - throw new 
UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getString(columnLabel: String): String = { @@ -480,7 +489,7 @@ class UJESSQLResultSet( } override def getTime(columnLabel: String): Time = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getTimestamp(columnLabel: String): Timestamp = { @@ -488,15 +497,15 @@ class UJESSQLResultSet( } override def getAsciiStream(columnLabel: String): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getUnicodeStream(columnLabel: String): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getBinaryStream(columnLabel: String): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getWarnings: SQLWarning = { @@ -508,7 +517,7 @@ class UJESSQLResultSet( } override def getCursorName: String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getMetaData: UJESSQLResultSetMetaData = { @@ -518,11 +527,7 @@ class UJESSQLResultSet( override def getObject(columnIndex: Int): Object = { val any = getColumnValue(columnIndex) - if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") - } else { - any.asInstanceOf[Object] - } + any.asInstanceOf[Object] } override def getObject(columnLabel: String): Object = { @@ -542,19 +547,19 @@ class UJESSQLResultSet( } } if (columnIndex == -1) { - throw new UJESSQLException( - UJESSQLErrorCode.RESULTSET_ROWERROR, + throw new 
LinkisSQLException( + LinkisSQLErrorCode.RESULTSET_ROWERROR, s"can not find column: ${columnLabel}" ) } else columnIndex } override def getCharacterStream(columnIndex: Int): Reader = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getCharacterStream(columnLabel: String): Reader = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getBigDecimal(columnIndex: Int): java.math.BigDecimal = { @@ -567,31 +572,31 @@ class UJESSQLResultSet( override def isBeforeFirst: Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else currentRowCursor == -1 } override def isAfterLast: Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else currentRowCursor > resultSetRow.size() - 1 } override def isFirst: Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else currentRowCursor == 0 } override def isLast: Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else currentRowCursor == resultSetRow.size() - 1 } override def beforeFirst(): Unit = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else { currentRowCursor = -1 updateCurrentRow(currentRowCursor) @@ -600,7 +605,7 @@ class UJESSQLResultSet( override def afterLast(): Unit = { if (resultSetRow == null) { - throw new 
UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else { currentRowCursor = resultSetRow.size() updateCurrentRow(currentRowCursor) @@ -627,7 +632,7 @@ class UJESSQLResultSet( override def getRow: Int = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else { currentRowCursor + 1 } @@ -635,10 +640,10 @@ class UJESSQLResultSet( override def absolute(row: Int): Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else if (row > resultSetRow.size()) { - throw new UJESSQLException( - UJESSQLErrorCode.RESULTSET_ROWERROR, + throw new LinkisSQLException( + LinkisSQLErrorCode.RESULTSET_ROWERROR, "The specified number of rows is greater than the maximum number of rows" ) } else { @@ -654,10 +659,10 @@ class UJESSQLResultSet( override def relative(rows: Int): Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else if (rows > resultSetRow.size()) { - throw new UJESSQLException( - UJESSQLErrorCode.RESULTSET_ROWERROR, + throw new LinkisSQLException( + LinkisSQLErrorCode.RESULTSET_ROWERROR, "The specified number of rows is greater than the maximum number of rows" ) } else { @@ -678,12 +683,14 @@ class UJESSQLResultSet( true } + def getNextResultSet: UJESSQLResultSet = this.nextResultSet + override def setFetchDirection(direction: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getFetchDirection: Int = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def 
setFetchSize(rows: Int): Unit = { @@ -703,248 +710,248 @@ class UJESSQLResultSet( } override def rowUpdated(): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def rowInserted(): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def rowDeleted(): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNull(columnIndex: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBoolean(columnIndex: Int, x: Boolean): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateByte(columnIndex: Int, x: Byte): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateShort(columnIndex: Int, x: Short): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateInt(columnIndex: Int, x: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateLong(columnIndex: Int, x: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateFloat(columnIndex: Int, x: Float): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateDouble(columnIndex: Int, x: Double): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBigDecimal(columnIndex: Int, x: java.math.BigDecimal): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateString(columnIndex: Int, x: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBytes(columnIndex: Int, x: Array[Byte]): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateDate(columnIndex: Int, x: Date): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateTime(columnIndex: Int, x: Time): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateTimestamp(columnIndex: Int, x: Timestamp): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnIndex: Int, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def 
updateCharacterStream(columnIndex: Int, x: Reader, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateObject(columnIndex: Int, x: scala.Any, scaleOrLength: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateObject(columnIndex: Int, x: scala.Any): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNull(columnLabel: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBoolean(columnLabel: String, x: Boolean): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateByte(columnLabel: String, x: Byte): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateShort(columnLabel: String, x: Short): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateInt(columnLabel: String, x: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateLong(columnLabel: String, x: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateFloat(columnLabel: String, x: Float): Unit = { - throw new 
UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateDouble(columnLabel: String, x: Double): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBigDecimal(columnLabel: String, x: java.math.BigDecimal): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateString(columnLabel: String, x: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBytes(columnLabel: String, x: Array[Byte]): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateDate(columnLabel: String, x: Date): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateTime(columnLabel: String, x: Time): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateTimestamp(columnLabel: String, x: Timestamp): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnLabel: String, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnLabel: String, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + 
throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnLabel: String, reader: Reader, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateObject(columnLabel: String, x: scala.Any, scaleOrLength: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateObject(columnLabel: String, x: scala.Any): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def insertRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def deleteRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def refreshRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def cancelRowUpdates(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def moveToInsertRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def moveToCurrentRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } 
override def getStatement: Statement = { if (statement != null && !hasClosed) { statement.asInstanceOf[Statement] - } else throw new UJESSQLException(UJESSQLErrorCode.STATEMENT_CLOSED) + } else throw new LinkisSQLException(LinkisSQLErrorCode.STATEMENT_CLOSED) } override def getObject(columnIndex: Int, map: util.Map[String, Class[_]]): AnyRef = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getRef(columnIndex: Int): Ref = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getBlob(columnIndex: Int): Blob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getClob(columnIndex: Int): Clob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getArray(columnIndex: Int): sql.Array = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getObject(columnLabel: String, map: util.Map[String, Class[_]]): AnyRef = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getRef(columnLabel: String): Ref = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getBlob(columnLabel: String): Blob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getClob(columnLabel: String): Clob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getArray(columnLabel: String): sql.Array = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } private def getDate(columnIndex: Int, localTimeZone: DateTimeZone): Date = { val value = getColumnValue(columnIndex) logger.info(s"the value of value is $value and the value of localTimeZone is $localTimeZone") if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else new Date(DATE_FORMATTER.withZone(localTimeZone).parseMillis(String.valueOf(value))); } @@ -958,11 +965,11 @@ class UJESSQLResultSet( } override def getTime(columnIndex: Int, cal: Calendar): Time = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getTime(columnLabel: String, cal: Calendar): Time = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } private def getTimestamp(columnIndex: Int, localTimeZone: DateTimeZone): Timestamp = { @@ -970,8 +977,9 @@ class UJESSQLResultSet( logger.info(s"the value of value is $value and the value of localTimeZone is $localTimeZone") if (wasNull()) { null - } else + } else { new Timestamp(TIMESTAMP_FORMATTER.withZone(localTimeZone).parseMillis(String.valueOf(value))) + } } override def getTimestamp(columnIndex: Int, cal: Calendar): Timestamp = { @@ -986,63 +994,63 @@ class UJESSQLResultSet( } override def getURL(columnIndex: Int): URL = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getURL(columnLabel: String): URL = { - throw new 
UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateRef(columnIndex: Int, x: Ref): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateRef(columnLabel: String, x: Ref): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnIndex: Int, x: Blob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnLabel: String, x: Blob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnIndex: Int, x: Clob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnLabel: String, x: Clob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateArray(columnIndex: Int, x: sql.Array): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateArray(columnLabel: String, x: sql.Array): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getRowId(columnIndex: Int): RowId = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getRowId(columnLabel: String): RowId = 
{ - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateRowId(columnIndex: Int, x: RowId): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateRowId(columnLabel: String, x: RowId): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getHoldability: Int = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def isClosed: Boolean = { @@ -1050,187 +1058,187 @@ class UJESSQLResultSet( } override def updateNString(columnIndex: Int, nString: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNString(columnLabel: String, nString: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnIndex: Int, nClob: NClob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnLabel: String, nClob: NClob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNClob(columnIndex: Int): NClob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNClob(columnLabel: String): NClob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getSQLXML(columnIndex: Int): SQLXML = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getSQLXML(columnLabel: String): SQLXML = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateSQLXML(columnIndex: Int, xmlObject: SQLXML): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateSQLXML(columnLabel: String, xmlObject: SQLXML): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNString(columnIndex: Int): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNString(columnLabel: String): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNCharacterStream(columnIndex: Int): Reader = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNCharacterStream(columnLabel: String): Reader = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNCharacterStream(columnLabel: String, reader: Reader, length: Long): 
Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnIndex: Int, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnLabel: String, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnLabel: String, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnIndex: Int, inputStream: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnLabel: String, inputStream: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnIndex: Int, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnLabel: String, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnIndex: Int, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnLabel: String, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNCharacterStream(columnIndex: Int, x: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNCharacterStream(columnLabel: String, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnIndex: Int, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnIndex: Int, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnIndex: Int, x: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnLabel: String, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnLabel: String, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnLabel: String, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnIndex: Int, inputStream: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnLabel: String, inputStream: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnIndex: Int, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnLabel: String, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnIndex: Int, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnLabel: String, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } 
override def getObject[T](columnIndex: Int, `type`: Class[T]): T = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getObject[T](columnLabel: String, `type`: Class[T]): T = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def unwrap[T](iface: Class[T]): T = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def isWrapperFor(iface: Class[_]): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSetMetaData.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSetMetaData.scala index e51309e8858..eaeb1b25d84 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSetMetaData.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSetMetaData.scala @@ -33,19 +33,19 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { private[jdbc] def setColumnNameProperties(column: Int, columnName: String): Unit = { if (column != null && columnName != null) { columnNameProperties.put(column, columnName) - } else throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + } else throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) } private[jdbc] def setDataTypeProperties(column: Int, columnName: String): Unit = { if (column != null && columnName != null) { dataTypeProperties.put(column, columnName) - } else throw new 
UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + } else throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) } private[jdbc] def setCommentPropreties(column: Int, columnName: String): Unit = { if (column != null && columnName != null) { commentProperties.put(column, columnName) - } else throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + } else throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) } override def getColumnCount: Int = { @@ -53,7 +53,7 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { } override def isAutoIncrement(column: Int): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def isCaseSensitive(column: Int): Boolean = true @@ -74,7 +74,7 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { override def getColumnLabel(column: Int): String = { if (columnNameProperties.get(column) == null) { - throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) } else columnNameProperties.get(column) } @@ -83,7 +83,7 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { } override def getSchemaName(column: Int): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def getPrecision(column: Int): Int = { @@ -102,11 +102,11 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { } override def getTableName(column: Int): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def getCatalogName(column: Int): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def 
getColumnType(column: Int): Int = { @@ -115,7 +115,7 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { override def getColumnTypeName(column: Int): String = { if (dataTypeProperties.get(column) == null) { - throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) } else dataTypeProperties.get(column) } @@ -128,19 +128,19 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { } override def isDefinitelyWritable(column: Int): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def getColumnClassName(column: Int): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def unwrap[T](iface: Class[T]): T = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def isWrapperFor(iface: Class[_]): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } protected def toZeroIndex(column: Int): Int = { diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLStatement.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLStatement.scala deleted file mode 100644 index 916bdbaa931..00000000000 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLStatement.scala +++ /dev/null @@ -1,302 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ujes.jdbc - -import org.apache.linkis.common.exception.ErrorException -import org.apache.linkis.common.utils.{Logging, Utils} -import org.apache.linkis.ujes.client.request.JobExecuteAction -import org.apache.linkis.ujes.client.request.JobExecuteAction.EngineType -import org.apache.linkis.ujes.client.response.JobExecuteResult -import org.apache.linkis.ujes.jdbc.hook.JDBCDriverPreExecutionHook - -import java.sql.{Connection, ResultSet, SQLWarning, Statement} -import java.util.concurrent.TimeUnit - -import scala.collection.JavaConverters.mapAsJavaMapConverter -import scala.concurrent.TimeoutException -import scala.concurrent.duration.Duration - -class UJESSQLStatement(private[jdbc] val ujesSQLConnection: UJESSQLConnection) - extends Statement - with Logging { - - private var jobExecuteResult: JobExecuteResult = _ - private var resultSet: UJESSQLResultSet = _ - private var closed = false - private var maxRows: Int = 0 - private var fetchSize = 100 - private var queryTimeout = 0 - - private var queryEnd = false - - private[jdbc] def throwWhenClosed[T](op: => T): T = ujesSQLConnection.throwWhenClosed { - if (isClosed) throw new UJESSQLException(UJESSQLErrorCode.STATEMENT_CLOSED) - else op - } - - override def executeQuery(sql: String): UJESSQLResultSet = { - if (!execute(sql)) throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) - resultSet - } - - 
override def executeUpdate(sql: String): Int = { - execute(sql) - 0 - } - - override def close(): Unit = { - closed = true - clearQuery() - } - - def clearQuery(): Unit = { - if (jobExecuteResult != null && !queryEnd) { - Utils.tryAndWarn(ujesSQLConnection.ujesClient.kill(jobExecuteResult)) - jobExecuteResult = null - } - if (resultSet != null) { - Utils.tryAndWarn(resultSet.close()) - resultSet = null - } - } - - override def getMaxFieldSize: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "getMaxFieldSize not supported" - ) - - override def setMaxFieldSize(max: Int): Unit = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "setMaxFieldSize not supported" - ) - - override def getMaxRows: Int = maxRows - - override def setMaxRows(max: Int): Unit = this.maxRows = max - - override def setEscapeProcessing(enable: Boolean): Unit = if (enable) - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "setEscapeProcessing not supported" - ) - - override def getQueryTimeout: Int = queryTimeout - - override def setQueryTimeout(seconds: Int): Unit = throwWhenClosed(queryTimeout = seconds * 1000) - - override def cancel(): Unit = throwWhenClosed(clearQuery()) - - override def getWarnings: SQLWarning = null - - override def clearWarnings(): Unit = {} - - override def setCursorName(name: String): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "setCursorName not supported") - - override def execute(sql: String): Boolean = Utils.tryCatch(throwWhenClosed { - var parsedSQL = sql - JDBCDriverPreExecutionHook.getPreExecutionHooks.foreach { preExecution => - parsedSQL = preExecution.callPreExecutionHook(parsedSQL) - } - logger.info(s"begin to execute sql ${parsedSQL}") - val action = JobExecuteAction - .builder() - .setEngineType(ujesSQLConnection.getEngineType) - .addExecuteCode(parsedSQL) - .setCreator(ujesSQLConnection.creator) - .setUser(ujesSQLConnection.user) - if 
(ujesSQLConnection.variableMap.nonEmpty) { - action.setVariableMap(ujesSQLConnection.variableMap.asJava) - } - jobExecuteResult = - Utils.tryCatch(ujesSQLConnection.ujesClient.execute(action.build())) { t: Throwable => - logger.error("UJESClient failed to get result", t) - null - } - // jobExecuteResult = ujesSQLConnection.ujesClient.execute(action.build()) - queryEnd = false - var status = ujesSQLConnection.ujesClient.status(jobExecuteResult) - val atMost = - if (queryTimeout > 0) Duration(queryTimeout, TimeUnit.MILLISECONDS) else Duration.Inf - if (!status.isCompleted) Utils.tryThrow { - Utils.waitUntil( - () => { - status = ujesSQLConnection.ujesClient.status(jobExecuteResult) - status.isCompleted || closed - }, - atMost, - 100, - 10000 - ) - } { - case t: TimeoutException => - if (queryTimeout > 0) clearQuery() - new UJESSQLException(UJESSQLErrorCode.QUERY_TIMEOUT, "query has been timeout!").initCause(t) - case t => t - } - if (!closed) { - var jobInfo = ujesSQLConnection.ujesClient.getJobInfo(jobExecuteResult) - if (status.isFailed) - throw new ErrorException( - jobInfo.getRequestPersistTask.getErrCode, - jobInfo.getRequestPersistTask.getErrDesc - ) - val jobInfoStatus = jobInfo.getJobStatus - if (!jobInfoStatus.equals("Succeed")) Utils.tryThrow { - Utils.waitUntil( - () => { - jobInfo = ujesSQLConnection.ujesClient.getJobInfo(jobExecuteResult) - val state = jobInfo.getJobStatus match { - case "Failed" | "Cancelled" | "Timeout" | "Succeed" => true - case _ => false - } - state || closed - }, - atMost, - 100, - 10000 - ) - } { - case t: TimeoutException => - if (queryTimeout > 0) clearQuery() - new UJESSQLException(UJESSQLErrorCode.QUERY_TIMEOUT, "query has been timeout!") - .initCause(t) - case t => t - } - logger.info(s"end to execute sql ${parsedSQL}") - val resultSetList = jobInfo.getResultSetList(ujesSQLConnection.ujesClient) - logger.info(s"resultSetList is ${resultSetList.mkString(",")}") - queryEnd = true - if (resultSetList != null) { - resultSet = new 
UJESSQLResultSet(resultSetList, this, maxRows, fetchSize) - true - } else false - } else throw new UJESSQLException(UJESSQLErrorCode.STATEMENT_CLOSED, "Statement is closed.") - }) { case t: Throwable => - logger.error("任务执行失败", t) - false - } - - def getJobExcuteResult: JobExecuteResult = jobExecuteResult - - override def getResultSet: UJESSQLResultSet = resultSet - - override def getUpdateCount: Int = throwWhenClosed(-1) - - override def getMoreResults: Boolean = false - - override def setFetchDirection(direction: Int): Unit = - throwWhenClosed( - if (direction != ResultSet.FETCH_FORWARD) - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "only FETCH_FORWARD is supported." - ) - ) - - override def getFetchDirection: Int = throwWhenClosed(ResultSet.FETCH_FORWARD) - - override def setFetchSize(rows: Int): Unit = this.fetchSize = rows - - override def getFetchSize: Int = fetchSize - - override def getResultSetConcurrency: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "getResultSetConcurrency not supported." - ) - - override def getResultSetType: Int = throwWhenClosed(ResultSet.TYPE_FORWARD_ONLY) - - override def addBatch(sql: String): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "addBatch not supported.") - - override def clearBatch(): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "clearBatch not supported.") - - override def executeBatch(): Array[Int] = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "executeBatch not supported.") - - override def getConnection: Connection = throwWhenClosed(ujesSQLConnection) - - override def getMoreResults(current: Int): Boolean = false - - override def getGeneratedKeys: ResultSet = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "getGeneratedKeys not supported." 
- ) - - override def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "executeUpdate with autoGeneratedKeys not supported." - ) - - override def executeUpdate(sql: String, columnIndexes: Array[Int]): Int = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "executeUpdate with columnIndexes not supported." - ) - - override def executeUpdate(sql: String, columnNames: Array[String]): Int = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "executeUpdate with columnNames not supported." - ) - - override def execute(sql: String, autoGeneratedKeys: Int): Boolean = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "execute with autoGeneratedKeys not supported." - ) - - override def execute(sql: String, columnIndexes: Array[Int]): Boolean = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "execute with columnIndexes not supported." - ) - - override def execute(sql: String, columnNames: Array[String]): Boolean = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "execute with columnNames not supported." 
- ) - - override def getResultSetHoldability: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "getResultSetHoldability not supported" - ) - - override def isClosed: Boolean = closed - - override def setPoolable(poolable: Boolean): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "setPoolable not supported") - - override def isPoolable: Boolean = false - - override def closeOnCompletion(): Unit = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "closeOnCompletion not supported" - ) - - override def isCloseOnCompletion: Boolean = false - - override def unwrap[T](iface: Class[T]): T = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "unwrap not supported") - - override def isWrapperFor(iface: Class[_]): Boolean = false -} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala index ba4cd3878fa..387b6ef7dc3 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala @@ -18,12 +18,14 @@ package org.apache.linkis.ujes.jdbc import java.sql.{SQLException, Timestamp, Types} +import java.util.Locale object UJESSQLTypeParser { def parserFromName(typeName: String): Int = { - typeName.toLowerCase match { - case null => throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + val typeNameLowerCase = typeName.toLowerCase(Locale.getDefault()) + typeName.toLowerCase() match { + case null => throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) case "string" => Types.NVARCHAR case "short" => Types.SMALLINT case "int" => Types.INTEGER @@ -43,7 +45,12 @@ object UJESSQLTypeParser { case "bigint" => 
Types.BIGINT case "array" => Types.ARRAY case "map" => Types.JAVA_OBJECT - case _ => throw new SQLException(s"parameter type error,Type:$typeName") + case _ => + if (typeNameLowerCase.startsWith("decimal")) { + Types.DECIMAL + } else { + Types.NVARCHAR + } } } @@ -60,27 +67,7 @@ object UJESSQLTypeParser { case _: Char => Types.CHAR case _: BigDecimal => Types.DECIMAL case _: Timestamp => Types.TIMESTAMP - case _ => throw new UJESSQLException(UJESSQLErrorCode.PREPARESTATEMENT_TYPEERROR) - } - } - - def parserFromMetaData(dataType: Int): String = { - dataType match { - case Types.CHAR => "string" - case Types.SMALLINT => "short" - case Types.INTEGER => "int" - case Types.BIGINT => "long" - case Types.FLOAT => "float" - case Types.DOUBLE => "double" - case Types.BOOLEAN => "boolean" - case Types.TINYINT => "byte" - case Types.CHAR => "char" - case Types.TIMESTAMP => "timestamp" - case Types.DECIMAL => "decimal" - case Types.VARCHAR => "varchar" - case Types.NVARCHAR => "string" - case Types.DATE => "date" - case _ => throw new UJESSQLException(UJESSQLErrorCode.PREPARESTATEMENT_TYPEERROR) + case _ => throw new LinkisSQLException(LinkisSQLErrorCode.PREPARESTATEMENT_TYPEERROR) } } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala index 038ff387124..c7de7d37341 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala @@ -24,7 +24,7 @@ import scala.collection.mutable.ArrayBuffer trait JDBCDriverPreExecutionHook { - def callPreExecutionHook(sql: String): String + def callPreExecutionHook(sql: String, skip: Boolean): String } @@ -34,8 
+34,7 @@ object JDBCDriverPreExecutionHook extends Logging { val hooks = new ArrayBuffer[JDBCDriverPreExecutionHook]() CommonVars( "wds.linkis.jdbc.pre.hook.class", - "org.apache.linkis.ujes.jdbc.hook.impl.TableauPreExecutionHook," + - "org.apache.linkis.ujes.jdbc.hook.impl.NoLimitExecutionHook" + "org.apache.linkis.ujes.jdbc.hook.impl.TableauPreExecutionHook" ).getValue.split(",") foreach { hookStr => Utils.tryCatch { val clazz = Class.forName(hookStr.trim) @@ -51,5 +50,5 @@ object JDBCDriverPreExecutionHook extends Logging { hooks.toArray } - def getPreExecutionHooks = preExecutionHooks + def getPreExecutionHooks: Array[JDBCDriverPreExecutionHook] = preExecutionHooks } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/NoLimitExecutionHook.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/NoLimitExecutionHook.scala deleted file mode 100644 index 18f94c0fde1..00000000000 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/NoLimitExecutionHook.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ujes.jdbc.hook.impl - -import org.apache.linkis.ujes.jdbc.UJESSQLDriverMain -import org.apache.linkis.ujes.jdbc.hook.JDBCDriverPreExecutionHook - -class NoLimitExecutionHook extends JDBCDriverPreExecutionHook { - - override def callPreExecutionHook(sql: String): String = { - if (UJESSQLDriverMain.LIMIT_ENABLED.toLowerCase.equals("false")) { - var noLimitSql = "--set ide.engine.no.limit.allow=true\n" + sql - val lowerCaseLimitSql = noLimitSql.toLowerCase() - if (lowerCaseLimitSql.contains("limit ") && lowerCaseLimitSql.contains("tableausql")) { - val lastIndexOfLimit = lowerCaseLimitSql.lastIndexOf("limit ") - noLimitSql = noLimitSql.substring(0, lastIndexOfLimit) - } - noLimitSql - } else { - sql - } - - } - -} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/TableauPreExecutionHook.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/TableauPreExecutionHook.scala index 63c4f53c9fa..ec177b4de71 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/TableauPreExecutionHook.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/TableauPreExecutionHook.scala @@ -21,7 +21,10 @@ import org.apache.linkis.ujes.jdbc.hook.JDBCDriverPreExecutionHook class TableauPreExecutionHook extends JDBCDriverPreExecutionHook { - override def callPreExecutionHook(sql: String): String = { + override def callPreExecutionHook(sql: String, skip: Boolean): String = { + if (skip) { + return sql + } if ( sql.contains("CREATE INDEX") || sql .contains("CREATE TABLE") || sql.contains("INSERT INTO") || sql.contains("DROP TABLE") diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/CreateConnection.java 
b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/CreateConnection.java index 51d9e9953ae..ca2c1e35fda 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/CreateConnection.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/CreateConnection.java @@ -27,12 +27,12 @@ public class CreateConnection { - private static UJESSQLConnection conn; + private static LinkisSQLConnection conn; - public static UJESSQLConnection getConnection() throws ClassNotFoundException, SQLException { + public static LinkisSQLConnection getConnection() throws ClassNotFoundException, SQLException { Class.forName("org.apache.linkis.ujes.jdbc.UJESSQLDriver"); conn = - (UJESSQLConnection) + (LinkisSQLConnection) DriverManager.getConnection("jdbc:linkis://hostname:port", "username", "password"); return conn; } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/JDBCSpiTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/JDBCSpiTest.java index 427a6b5fc49..3e76bb28561 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/JDBCSpiTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/JDBCSpiTest.java @@ -29,21 +29,21 @@ * */ public class JDBCSpiTest { - private static UJESSQLConnection conn; + private static LinkisSQLConnection conn; - public static UJESSQLConnection getConnection() throws ClassNotFoundException, SQLException { + public static LinkisSQLConnection getConnection() throws ClassNotFoundException, SQLException { Class.forName("org.apache.linkis.ujes.jdbc.UJESSQLDriver"); conn = - (UJESSQLConnection) - DriverManager.getConnection("jdbc:linkis://hostname:port", "root", "123456"); + (LinkisSQLConnection) + 
DriverManager.getConnection("jdbc:linkis://127.0.0.1:9001", "root", "123456"); return conn; } @Test public void spiTest() { try { - UJESSQLConnection conn = - (UJESSQLConnection) + LinkisSQLConnection conn = + (LinkisSQLConnection) DriverManager.getConnection("jdbc:linkis://hostname:port", "username", "password"); Assertions.assertNotNull(conn); } catch (SQLException e) { diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/LinkisSQLStatementTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/LinkisSQLStatementTest.java new file mode 100644 index 00000000000..e319cd02540 --- /dev/null +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/LinkisSQLStatementTest.java @@ -0,0 +1,343 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ujes.jdbc; + +import org.apache.linkis.governance.common.entity.ExecutionNodeStatus; +import org.apache.linkis.governance.common.entity.task.RequestPersistTask; +import org.apache.linkis.ujes.client.UJESClient; +import org.apache.linkis.ujes.client.response.JobExecuteResult; +import org.apache.linkis.ujes.client.response.JobInfoResult; +import org.apache.linkis.ujes.client.response.ResultSetResult; + +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Properties; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; + +/* + * Notice: + * if you want to test this module,you must rewrite default parameters and SQL we used for local test + * */ + +public class LinkisSQLStatementTest { + private static LinkisSQLConnection conn; + private static LinkisSQLStatement statement; + private static int maxRows; + private static int queryTimeout; + private static String sql; + private static String sqlCreate; + private static String sqlInsert; + private static String sqlSelect; + private static String sqlDrop; + + @BeforeAll + public static void createConnection() { + try { + conn = CreateConnection.getConnection(); + statement = (LinkisSQLStatement) conn.createStatement(); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } catch (SQLException e) { + e.printStackTrace(); + } catch (Exception e) { + statement = null; + } + } + + @BeforeEach + public void setParams() { + sql = "show tables"; + sqlCreate = "CREATE TABLE if not exists db.test1236 as select * from ai_fmi_ods.1000_10"; + sqlInsert = "insert into db.test1236 select * from ai_fmi_ods.1000_10 limit 10"; + sqlSelect = "select * from db.test1236"; + sqlDrop = 
"drop table db.test1236"; + maxRows = 100; + queryTimeout = 10000; + } + + @Test + public void execute() { + if (statement != null) { + assertTrue(statement.execute(sql)); + } + } + + @Test + public void executeQuery() { + if (statement != null) { + UJESSQLResultSet resultSet = statement.executeQuery(sql); + assertTrue(resultSet.next()); + } + } + + @Test + public void crud() { + if (statement != null) { + statement.executeQuery(sqlCreate); + statement.executeQuery(sqlInsert); + UJESSQLResultSet resultSet = statement.executeQuery(sqlSelect); + int columnCount = 0; + while (resultSet.next()) { + UJESSQLResultSetMetaData rsmd = resultSet.getMetaData(); + for (int i = 1; i <= rsmd.getColumnCount(); i++) { + System.out.print( + rsmd.getColumnName(i) + + ":" + + rsmd.getColumnTypeName(i) + + ":" + + resultSet.getObject(i) + + " "); + columnCount = i; + } + } + System.out.println(columnCount); + assertTrue(resultSet.isAfterLast()); + statement.executeQuery(sqlDrop); + } + } + + @Test + public void setMaxRows() { + if (statement != null) { + statement.setMaxRows(maxRows); + assertEquals(maxRows, statement.getMaxRows()); + } + } + + @Test + public void setQueryTimeout() { + if (statement != null) { + statement.setQueryTimeout(queryTimeout); + assertEquals(statement.getQueryTimeout(), queryTimeout * 1000); + } + } + + @Test + public void cancel() { + if (statement != null) { + statement.executeQuery(sql); + statement.cancel(); + assertNull(statement.getResultSet()); + assertNull(statement.getJobExcuteResult()); + } + } + + @Test + public void getConnWhenIsClosed() { + if (statement != null) { + assertEquals(statement.getConnection(), conn); + } + } + + /** + * single query without next result set check point 1: getMoreResults returns false check point 2: + * default getMoreResults, use Statement.CLOSE_CURRENT_RESULT. The current result set is closed. 
+ */ + @Test + public void singleQueryWithNoMoreResult() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + LinkisSQLConnection linkisSQLConnection = Mockito.spy(new LinkisSQLConnection(ujesClient, t)); + LinkisSQLStatement linkisSQLStatement = new LinkisSQLStatement(linkisSQLConnection); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + + JobExecuteResult jobExecuteResult = new JobExecuteResult(); + Mockito.doReturn(jobExecuteResult).when(linkisSQLConnection).toSubmit(anyString()); + JobInfoResult jobInfoResult = Mockito.spy(new JobInfoResult()); + Mockito.when(ujesClient.getJobInfo(jobExecuteResult)).thenReturn(jobInfoResult); + Mockito.doReturn(ExecutionNodeStatus.Succeed.name()).when(jobInfoResult).getJobStatus(); + Mockito.doReturn(new RequestPersistTask()).when(jobInfoResult).getRequestPersistTask(); + + Mockito.doReturn(new String[] {"path 1"}).when(jobInfoResult).getResultSetList(ujesClient); + + linkisSQLStatement.execute("select 1"); + UJESSQLResultSet resultSet = linkisSQLStatement.getResultSet(); + assertNotNull(resultSet); + assertFalse(resultSet.isClosed()); + // it will close current result set with default value 1 + boolean moreResults = linkisSQLStatement.getMoreResults(); + assertFalse(moreResults); + assertTrue(resultSet.isClosed()); + } + + /** + * multiple query without multiple result param, return one result check point 1: 2 sql executed. 
+ * 1 result set + */ + @Test + public void multiQueryWithNoMoreResult() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + LinkisSQLConnection linkisSQLConnection = Mockito.spy(new LinkisSQLConnection(ujesClient, t)); + LinkisSQLStatement linkisSQLStatement = new LinkisSQLStatement(linkisSQLConnection); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + JobExecuteResult jobExecuteResult = new JobExecuteResult(); + Mockito.doReturn(jobExecuteResult).when(linkisSQLConnection).toSubmit(anyString()); + JobInfoResult jobInfoResult = Mockito.spy(new JobInfoResult()); + Mockito.when(ujesClient.getJobInfo(jobExecuteResult)).thenReturn(jobInfoResult); + Mockito.doReturn(ExecutionNodeStatus.Succeed.name()).when(jobInfoResult).getJobStatus(); + Mockito.doReturn(new RequestPersistTask()).when(jobInfoResult).getRequestPersistTask(); + + Mockito.doReturn(new String[] {"path 1", "path 2"}) + .when(jobInfoResult) + .getResultSetList(ujesClient); + + linkisSQLStatement.execute("select 1;select 2;"); + UJESSQLResultSet resultSet = linkisSQLStatement.getResultSet(); + assertNotNull(resultSet); + assertFalse(resultSet.isClosed()); + // it will close current result set with default value 1 + boolean moreResults = linkisSQLStatement.getMoreResults(); + assertFalse(moreResults); + assertTrue(resultSet.isClosed()); + } + + /** + * multiple query executed with multiple result param is Y check point 1: getMoreResults returns + * true check point 2: current result is closed check point 3: second getMoreResults returns false + */ + @Test + public void multiQueryWithMoreResult() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + t.put(UJESSQLDriverMain.ENABLE_MULTI_RESULT(), "Y"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + LinkisSQLConnection linkisSQLConnection = Mockito.spy(new LinkisSQLConnection(ujesClient, t)); + LinkisSQLStatement linkisSQLStatement 
= new LinkisSQLStatement(linkisSQLConnection); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + + JobExecuteResult jobExecuteResult = new JobExecuteResult(); + Mockito.doReturn(jobExecuteResult).when(linkisSQLConnection).toSubmit(anyString()); + JobInfoResult jobInfoResult = Mockito.spy(new JobInfoResult()); + Mockito.when(ujesClient.getJobInfo(jobExecuteResult)).thenReturn(jobInfoResult); + Mockito.doReturn(ExecutionNodeStatus.Succeed.name()).when(jobInfoResult).getJobStatus(); + Mockito.doReturn(new RequestPersistTask()).when(jobInfoResult).getRequestPersistTask(); + + Mockito.doReturn(new String[] {"path 1", "path 2"}) + .when(jobInfoResult) + .getResultSetList(ujesClient); + + linkisSQLStatement.execute("select 1;select 2;"); + UJESSQLResultSet resultSet = linkisSQLStatement.getResultSet(); + assertNotNull(resultSet); + assertFalse(resultSet.isClosed()); + // it will close current result set with default value 1 + boolean moreResults = linkisSQLStatement.getMoreResults(); + assertTrue(moreResults); + assertTrue(resultSet.isClosed()); + moreResults = linkisSQLStatement.getMoreResults(); + assertFalse(moreResults); + } + + /** + * multiple query executed with multiple result param is Y, and use + * LinkisSQLStatement.KEEP_CURRENT_RESULT check point 1: getMoreResults returns true check point + * 2: current result is not close check point 3: second getMoreResults returns false + */ + @Test + public void multiQueryWithMoreResultNotCloseCurrent() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + t.put(UJESSQLDriverMain.ENABLE_MULTI_RESULT(), "Y"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + LinkisSQLConnection linkisSQLConnection = Mockito.spy(new LinkisSQLConnection(ujesClient, t)); + LinkisSQLStatement linkisSQLStatement = new LinkisSQLStatement(linkisSQLConnection); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + + JobExecuteResult jobExecuteResult = new 
JobExecuteResult(); + Mockito.doReturn(jobExecuteResult).when(linkisSQLConnection).toSubmit(anyString()); + JobInfoResult jobInfoResult = Mockito.spy(new JobInfoResult()); + Mockito.when(ujesClient.getJobInfo(jobExecuteResult)).thenReturn(jobInfoResult); + Mockito.doReturn(ExecutionNodeStatus.Succeed.name()).when(jobInfoResult).getJobStatus(); + Mockito.doReturn(new RequestPersistTask()).when(jobInfoResult).getRequestPersistTask(); + + Mockito.doReturn(new String[] {"path 1", "path 2"}) + .when(jobInfoResult) + .getResultSetList(ujesClient); + + linkisSQLStatement.execute("select 1;select 2;"); + UJESSQLResultSet resultSet = linkisSQLStatement.getResultSet(); + assertNotNull(resultSet); + assertFalse(resultSet.isClosed()); + // it will close current result set with default value 1 + boolean moreResults = linkisSQLStatement.getMoreResults(LinkisSQLStatement.KEEP_CURRENT_RESULT); + assertTrue(moreResults); + assertFalse(resultSet.isClosed()); + } + + /** + * multiple query executed with multiple result param is Y, and use + * LinkisSQLStatement.CLOSE_ALL_RESULTS check point 1: getMoreResults returns true check point 2: + * current result is not close check point 3: second getMoreResults returns false check point 4: + * first result set is closed after second invoke getMoreResults + */ + @Test + public void multiQueryWithMoreResultCloseAllOpenedCurrent() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + t.put(UJESSQLDriverMain.ENABLE_MULTI_RESULT(), "Y"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + LinkisSQLConnection linkisSQLConnection = Mockito.spy(new LinkisSQLConnection(ujesClient, t)); + LinkisSQLStatement linkisSQLStatement = new LinkisSQLStatement(linkisSQLConnection); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + + JobExecuteResult jobExecuteResult = new JobExecuteResult(); + Mockito.doReturn(jobExecuteResult).when(linkisSQLConnection).toSubmit(anyString()); + JobInfoResult jobInfoResult = 
Mockito.spy(new JobInfoResult()); + Mockito.when(ujesClient.getJobInfo(jobExecuteResult)).thenReturn(jobInfoResult); + Mockito.doReturn(ExecutionNodeStatus.Succeed.name()).when(jobInfoResult).getJobStatus(); + Mockito.doReturn(new RequestPersistTask()).when(jobInfoResult).getRequestPersistTask(); + + Mockito.doReturn(new String[] {"path 1", "path 2"}) + .when(jobInfoResult) + .getResultSetList(ujesClient); + + linkisSQLStatement.execute("select 1;select 2;"); + UJESSQLResultSet resultSet = linkisSQLStatement.getResultSet(); + assertNotNull(resultSet); + assertFalse(resultSet.isClosed()); + // it will close current result set with default value 1 + boolean moreResults = linkisSQLStatement.getMoreResults(Statement.KEEP_CURRENT_RESULT); + assertTrue(moreResults); + assertFalse(resultSet.isClosed()); + moreResults = linkisSQLStatement.getMoreResults(Statement.CLOSE_ALL_RESULTS); + assertFalse(moreResults); + assertTrue(resultSet.isClosed()); + } + + @AfterAll + public static void closeStateAndConn() { + if (statement != null) { + statement.close(); + } + if (conn != null) { + conn.close(); + } + } +} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaDataTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaDataTest.java index 0dab63b3ffe..100e13f3e46 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaDataTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaDataTest.java @@ -34,7 +34,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class UJESSQLDatabaseMetaDataTest { - private static UJESSQLConnection conn; + private static LinkisSQLConnection conn; private static UJESSQLDatabaseMetaData dbmd; @BeforeAll @@ -54,42 +54,43 @@ public static void preWork() { @Test public 
void supportsMinimumSQLGrammar() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.supportsMinimumSQLGrammar()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.supportsMinimumSQLGrammar()); } } @Test public void getResultSetHoldability() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getResultSetHoldability()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getResultSetHoldability()); } } @Test public void getMaxColumnsInGroupBy() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getMaxColumnsInGroupBy()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getMaxColumnsInGroupBy()); } } @Test public void supportsSubqueriesInComparisons() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.supportsSubqueriesInComparisons()); + Assertions.assertThrows( + LinkisSQLException.class, () -> dbmd.supportsSubqueriesInComparisons()); } } @Test public void getMaxColumnsInSelect() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getMaxColumnsInSelect()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getMaxColumnsInSelect()); } } @Test public void nullPlusNonNullIsNull() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.nullPlusNonNullIsNull()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.nullPlusNonNullIsNull()); } } @@ -104,7 +105,7 @@ public void supportsCatalogsInDataManipulation() { public void supportsDataDefinitionAndDataManipulationTransactions() { if (dbmd != null) { Assertions.assertThrows( - UJESSQLException.class, + LinkisSQLException.class, () -> dbmd.supportsDataDefinitionAndDataManipulationTransactions()); } } @@ -112,7 +113,7 @@ public void supportsDataDefinitionAndDataManipulationTransactions() { @Test public void supportsTableCorrelationNames() { if (dbmd != null) { - 
Assertions.assertThrows(UJESSQLException.class, () -> dbmd.supportsTableCorrelationNames()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.supportsTableCorrelationNames()); } } @@ -133,7 +134,7 @@ public void supportsFullOuterJoins() { @Test public void supportsExpressionsInOrderBy() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.supportsExpressionsInOrderBy()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.supportsExpressionsInOrderBy()); } } @@ -147,14 +148,14 @@ public void allProceduresAreCallable() { @Test public void getMaxTablesInSelect() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getMaxTablesInSelect()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getMaxTablesInSelect()); } } @Test public void nullsAreSortedAtStart() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.nullsAreSortedAtStart()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.nullsAreSortedAtStart()); } } @@ -168,7 +169,7 @@ public void supportsPositionedUpdate() { @Test public void ownDeletesAreVisible() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.ownDeletesAreVisible(0)); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.ownDeletesAreVisible(0)); } } @@ -182,14 +183,14 @@ public void supportsResultSetHoldability() { @Test public void getMaxStatements() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getMaxStatements()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getMaxStatements()); } } @Test public void getRowIdLifetime() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getRowIdLifetime()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getRowIdLifetime()); } } diff --git 
a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatementTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatementTest.java index 54bc3d4538e..da431c82e78 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatementTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatementTest.java @@ -35,8 +35,8 @@ * */ public class UJESSQLPreparedStatementTest { - private static UJESSQLConnection conn; - private UJESSQLPreparedStatement preStatement; + private static LinkisSQLConnection conn; + private LinkisSQLPreparedStatement preStatement; @BeforeAll public static void getConnection() { diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java index 0a545d8012c..c0631427ea6 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java @@ -17,7 +17,14 @@ package org.apache.linkis.ujes.jdbc; +import org.apache.linkis.ujes.client.UJESClient; +import org.apache.linkis.ujes.client.request.ResultSetAction; +import org.apache.linkis.ujes.client.response.ResultSetResult; + import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ -25,6 +32,10 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import static 
org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; /* * Notice: @@ -33,8 +44,8 @@ public class UJESSQLResultSetTest { - private static UJESSQLConnection conn; - private UJESSQLPreparedStatement preStatement; + private static LinkisSQLConnection conn; + private LinkisSQLPreparedStatement preStatement; private UJESSQLResultSet resultSet; private UJESSQLResultSetMetaData metaData; @@ -137,4 +148,101 @@ public void next() { Assertions.assertTrue(resultSet.isAfterLast()); } } + + /** single query result with no multiple result set check point 1: nextResultSet is null */ + @Test + public void singleQueryWithNoMoreResultSet() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + Mockito.when(ujesClient.resultSet(any())).thenReturn(new ResultSetResult()); + + LinkisSQLConnection linkisSQLConnection = new LinkisSQLConnection(ujesClient, t); + + UJESSQLResultSet ujessqlResultSet = + new UJESSQLResultSet( + new String[] {"path1"}, new LinkisSQLStatement(linkisSQLConnection), 0, 0); + + ujessqlResultSet.next(); + + assertNull(ujessqlResultSet.getNextResultSet()); + } + + /** + * multiple result set with multi result switch is Y check point 1: queryResult has two path, + * return first path. 
check point 2: the second result set returned check point 3: the third + * result set is null + */ + @Test + public void nultiQueryWithMoreResultSet() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + t.put(UJESSQLDriverMain.ENABLE_MULTI_RESULT(), "Y"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + List pathList = new ArrayList<>(); + Mockito.when(ujesClient.resultSet(any())) + .thenAnswer( + invocationOnMock -> { + ResultSetAction argument = invocationOnMock.getArgument(0); + String path = (String) argument.getParameters().get("path"); + if (pathList.isEmpty()) { + assertEquals("path1", path); + } + pathList.add(path); + + return new ResultSetResult(); + }); + LinkisSQLConnection linkisSQLConnection = new LinkisSQLConnection(ujesClient, t); + + UJESSQLResultSet ujessqlResultSet = + new UJESSQLResultSet( + new String[] {"path1", "path2"}, new LinkisSQLStatement(linkisSQLConnection), 0, 0); + + // 查询 + ujessqlResultSet.next(); + + // 存在下一个结果集 + UJESSQLResultSet nextResultSet = ujessqlResultSet.getNextResultSet(); + assertNotNull(nextResultSet); + nextResultSet.next(); + + // 不存在第三个结果集 + assertNull(nextResultSet.getNextResultSet()); + } + + /** + * multiple result set with multi result switch not Y check point 1: queryResult has two path, + * return last path. 
check point 2: the next result set is null + */ + @Test + public void nultiQueryWithNoMoreResultSet() { + Properties t = new Properties(); + t.put("user", "hiveUser"); + UJESClient ujesClient = Mockito.mock(UJESClient.class); + Mockito.when(ujesClient.resultSet(any())) + .thenAnswer( + invocationOnMock -> { + ResultSetAction argument = invocationOnMock.getArgument(0); + String path = (String) argument.getParameters().get("path"); + assertEquals("path4", path); + + return new ResultSetResult(); + }); + + LinkisSQLConnection linkisSQLConnection = new LinkisSQLConnection(ujesClient, t); + + UJESSQLResultSet ujessqlResultSet = + new UJESSQLResultSet( + new String[] {"path1", "path2", "path3", "path4"}, + new LinkisSQLStatement(linkisSQLConnection), + 0, + 0); + + // 查询 + ujessqlResultSet.next(); + + // 即使查询有多个结果集,也不会产生多个结果集返回 + UJESSQLResultSet nextResultSet = ujessqlResultSet.getNextResultSet(); + assertNull(nextResultSet); + } } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLStatementTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLStatementTest.java deleted file mode 100644 index e674dd5b235..00000000000 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLStatementTest.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ujes.jdbc; - -import java.sql.SQLException; - -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -/* - * Notice: - * if you want to test this module,you must rewrite default parameters and SQL we used for local test - * */ - -public class UJESSQLStatementTest { - private static UJESSQLConnection conn; - private static UJESSQLStatement statement; - private static int maxRows; - private static int queryTimeout; - private static String sql; - private static String sqlCreate; - private static String sqlInsert; - private static String sqlSelect; - private static String sqlDrop; - - @BeforeAll - public static void createConnection() { - try { - conn = CreateConnection.getConnection(); - statement = (UJESSQLStatement) conn.createStatement(); - } catch (ClassNotFoundException e) { - e.printStackTrace(); - } catch (SQLException e) { - e.printStackTrace(); - } catch (Exception e) { - statement = null; - } - } - - @BeforeEach - public void setParams() { - sql = "show tables"; - sqlCreate = "CREATE TABLE if not exists db.test1236 as select * from ai_fmi_ods.1000_10"; - sqlInsert = "insert into db.test1236 select * from ai_fmi_ods.1000_10 limit 10"; - sqlSelect = "select * from db.test1236"; - sqlDrop = "drop table db.test1236"; - maxRows = 
100; - queryTimeout = 10000; - } - - @Test - public void execute() { - if (statement != null) { - assertTrue(statement.execute(sql)); - } - } - - @Test - public void executeQuery() { - if (statement != null) { - UJESSQLResultSet resultSet = statement.executeQuery(sql); - assertTrue(resultSet.next()); - } - } - - @Test - public void crud() { - if (statement != null) { - statement.executeQuery(sqlCreate); - statement.executeQuery(sqlInsert); - UJESSQLResultSet resultSet = statement.executeQuery(sqlSelect); - int columnCount = 0; - while (resultSet.next()) { - UJESSQLResultSetMetaData rsmd = resultSet.getMetaData(); - for (int i = 1; i <= rsmd.getColumnCount(); i++) { - System.out.print( - rsmd.getColumnName(i) - + ":" - + rsmd.getColumnTypeName(i) - + ":" - + resultSet.getObject(i) - + " "); - columnCount = i; - } - } - System.out.println(columnCount); - assertTrue(resultSet.isAfterLast()); - statement.executeQuery(sqlDrop); - } - } - - @Test - public void setMaxRows() { - if (statement != null) { - statement.setMaxRows(maxRows); - assertEquals(maxRows, statement.getMaxRows()); - } - } - - @Test - public void setQueryTimeout() { - if (statement != null) { - statement.setQueryTimeout(queryTimeout); - assertEquals(statement.getQueryTimeout(), queryTimeout * 1000); - } - } - - @Test - public void cancel() { - if (statement != null) { - statement.executeQuery(sql); - statement.cancel(); - assertNull(statement.getResultSet()); - assertNull(statement.getJobExcuteResult()); - } - } - - @Test - public void getConnWhenIsClosed() { - if (statement != null) { - assertEquals(statement.getConnection(), conn); - } - } - - @AfterAll - public static void closeStateAndConn() { - if (statement != null) { - statement.close(); - } - if (conn != null) { - conn.close(); - } - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml b/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml index 99f4613974e..4b807cf150e 
100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml @@ -43,7 +43,7 @@ org.apache.linkis - linkis-bml-client + linkis-pes-client ${project.version} @@ -96,10 +96,9 @@ - org.json4s - json4s-jackson_${scala.binary.version} - ${json4s.version} - provided + org.apache.linkis + linkis-ps-common-lock + ${project.version} @@ -108,7 +107,31 @@ ${gson.version} provided - + + io.fabric8 + kubernetes-client + ${kubernetes-client.version} + + + io.fabric8 + kubernetes-model-common + + + io.fabric8 + kubernetes-model-core + + + + + io.fabric8 + kubernetes-model-common + ${kubernetes-client.version} + + + io.fabric8 + kubernetes-model-core + ${kubernetes-client.version} + diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java index 0e54ed8c4e5..d9cb61e4ada 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java @@ -260,8 +260,7 @@ private Class loadEngineConnPluginClass( private Map readFromProperties(String propertiesFile) { Map map = new HashMap<>(); Properties properties = new Properties(); - try { - BufferedReader reader = new BufferedReader(new FileReader(propertiesFile)); + try (BufferedReader reader = new BufferedReader(new FileReader(propertiesFile))) { properties.load(reader); map = new HashMap((Map) properties); } catch (IOException e) { diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java index 2bfcd00acae..21295f17899 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java @@ -132,8 +132,7 @@ private static String getEngineConnPluginClassFromURL( } return acceptedFunction.apply(className) ? className : null; } else if (url.endsWith(JAR_SUF_NAME)) { - try { - JarFile jarFile = new JarFile(new File(url)); + try (JarFile jarFile = new JarFile(new File(url))) { Enumeration en = jarFile.entries(); while (en.hasMoreElements()) { String name = en.nextElement().getName(); diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java index 803151d5340..061529524d1 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java @@ -114,7 +114,8 @@ public PageInfo queryDataSourceInfoPage( @Override public void uploadToECHome(MultipartFile mfile) { String engineConnsHome = 
defaultEngineConnBmlResourceGenerator.getEngineConnsHome(); - try { + try (OutputStream out = + new FileOutputStream(engineConnsHome + "/" + mfile.getOriginalFilename())) { InputStream in = mfile.getInputStream(); byte[] buffer = new byte[1024]; int len = 0; @@ -122,11 +123,9 @@ public void uploadToECHome(MultipartFile mfile) { if (!file.exists()) { log.info("engineplugin's home doesn’t exist"); } - OutputStream out = new FileOutputStream(engineConnsHome + "/" + mfile.getOriginalFilename()); while ((len = in.read(buffer)) != -1) { out.write(buffer, 0, len); } - out.close(); in.close(); } catch (Exception e) { log.info("file {} upload fail", mfile.getOriginalFilename()); @@ -135,7 +134,9 @@ public void uploadToECHome(MultipartFile mfile) { ZipUtils.fileToUnzip(engineConnsHome + "/" + mfile.getOriginalFilename(), engineConnsHome); File file = new File(engineConnsHome + "/" + mfile.getOriginalFilename()); if (file.exists()) { - file.delete(); + if (!file.delete()) { + log.error("file {} delete failed", mfile.getOriginalFilename()); + } log.info("file {} delete success", mfile.getOriginalFilename()); } } @@ -146,9 +147,13 @@ public static void deleteDir(File directory) { if (file.isDirectory()) { deleteDir(file); } else { - file.delete(); + if (!file.delete()) { + log.error("file {} delete failed", file.getName()); + } } } - directory.delete(); + if (!directory.delete()) { + log.error("directory {} delete failed", directory.getName()); + } } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/LinkisManagerApplication.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java similarity index 96% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/LinkisManagerApplication.java rename to 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java index cfbefdbaaac..064d61a6fb4 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/LinkisManagerApplication.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.manager.am; +package org.apache.linkis.manager; import org.apache.linkis.LinkisBaseServerApp; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java new file mode 100644 index 00000000000..e558191efd8 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java @@ -0,0 +1,242 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.conf; + +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.conf.TimeType; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.manager.common.entity.enumeration.MaintainType; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +public class AMConfiguration { + + // The configuration key for the YARN queue name. + public static final String YARN_QUEUE_NAME_CONFIG_KEY = "wds.linkis.rm.yarnqueue"; + + // Identifier for cross-queue tasks. + public static final String CROSS_QUEUE = "crossQueue"; + + // Identifier for across-cluster tasks. + public static final String ACROSS_CLUSTER_TASK = "acrossClusterTask"; + + // Identifier for priority clusters. + public static final String PRIORITY_CLUSTER = "priorityCluster"; + + // Target identifier for distinguishing target clusters. + public static final String PRIORITY_CLUSTER_TARGET = "bdp"; + + // Origin identifier for distinguishing source clusters. + public static final String PRIORITY_CLUSTER_ORIGIN = "bdap"; + + // Configuration key for the target cluster CPU threshold. + public static final String TARGET_CPU_THRESHOLD = "targetCPUThreshold"; + + // Configuration key for the target cluster memory threshold. + public static final String TARGET_MEMORY_THRESHOLD = "targetMemoryThreshold"; + + // Configuration key for the target cluster CPU percentage threshold. + public static final String TARGET_CPU_PERCENTAGE_THRESHOLD = "targetCPUPercentageThreshold"; + + // Configuration key for the target cluster memory percentage threshold. + public static final String TARGET_MEMORY_PERCENTAGE_THRESHOLD = "targetMemoryPercentageThreshold"; + + // Configuration key for the origin cluster CPU percentage threshold. + public static final String ORIGIN_CPU_PERCENTAGE_THRESHOLD = "originCPUPercentageThreshold"; + + // Configuration key for the origin cluster memory percentage threshold. 
+ public static final String ORIGIN_MEMORY_PERCENTAGE_THRESHOLD = "originMemoryPercentageThreshold"; + + public static final double ACROSS_CLUSTER_TOTAL_MEMORY_PERCENTAGE_THRESHOLD = + CommonVars.apply("linkis.yarn.across.cluster.memory.threshold", 0.8).getValue(); + + public static final double ACROSS_CLUSTER_TOTAL_CPU_PERCENTAGE_THRESHOLD = + CommonVars.apply("linkis.yarn.across.cluster.cpu.threshold", 0.8).getValue(); + + public static final CommonVars ECM_ADMIN_OPERATIONS = + CommonVars.apply("wds.linkis.governance.admin.operations", ""); + + public static final CommonVars ENGINE_START_MAX_TIME = + CommonVars.apply("wds.linkis.manager.am.engine.start.max.time", new TimeType("8m")); + + public static final CommonVars ENGINE_CONN_START_REST_MAX_WAIT_TIME = + CommonVars.apply("wds.linkis.manager.am.engine.rest.start.max.time", new TimeType("40s")); + + public static final CommonVars ENGINE_REUSE_MAX_TIME = + CommonVars.apply("wds.linkis.manager.am.engine.reuse.max.time", new TimeType("5m")); + + public static final Integer ENGINE_REUSE_COUNT_LIMIT = + CommonVars.apply("wds.linkis.manager.am.engine.reuse.count.limit", 2).getValue(); + + public static final CommonVars DEFAULT_NODE_OWNER = + CommonVars.apply("wds.linkis.manager.am.default.node.owner", "hadoop"); + + public static final CommonVars EM_NEW_WAIT_MILLS = + CommonVars.apply("wds.linkis.manager.am.em.new.wait.mills", 1000 * 60L); + + public static final CommonVars MULTI_USER_ENGINE_TYPES = + CommonVars.apply( + "wds.linkis.multi.user.engine.types", + "es,presto,io_file,appconn,openlookeng,trino,jobserver,nebula,hbase,doris"); + + public static final CommonVars ALLOW_BATCH_KILL_ENGINE_TYPES = + CommonVars.apply("wds.linkis.allow.batch.kill.engine.types", "spark,hive,python"); + + public static final CommonVars UNALLOW_BATCH_KILL_ENGINE_TYPES = + CommonVars.apply( + "wds.linkis.unallow.batch.kill.engine.types", "trino,appconn,io_file,nebula,jdbc"); + public static final CommonVars MULTI_USER_ENGINE_USER = + 
CommonVars.apply("wds.linkis.multi.user.engine.user", getDefaultMultiEngineUser()); + public static final String UDF_KILL_ENGINE_TYPE = + CommonVars.apply("linkis.udf.kill.engine.type", "spark,hive").getValue(); + + public static final CommonVars ENGINE_LOCKER_MAX_TIME = + CommonVars.apply("wds.linkis.manager.am.engine.locker.max.time", 1000 * 60 * 5); + + public static final String AM_CAN_RETRY_LOGS = + CommonVars.apply( + "wds.linkis.manager.am.can.retry.logs", "already in use;Cannot allocate memory") + .getValue(); + + public static final int REUSE_ENGINE_ASYNC_MAX_THREAD_SIZE = + CommonVars.apply("wds.linkis.manager.reuse.max.thread.size", 200).getValue(); + + public static final int CREATE_ENGINE_ASYNC_MAX_THREAD_SIZE = + CommonVars.apply("wds.linkis.manager.create.max.thread.size", 200).getValue(); + + public static final int ASK_ENGINE_ERROR_ASYNC_MAX_THREAD_SIZE = + CommonVars.apply("wds.linkis.manager.ask.error.max.thread.size", 100).getValue(); + + public static final int ASK_ENGINE_ASYNC_MAX_THREAD_SIZE = + CommonVars.apply("wds.linkis.ecm.launch.max.thread.size", 200).getValue(); + + public static final int ASYNC_STOP_ENGINE_MAX_THREAD_SIZE = + CommonVars.apply("wds.linkis.async.stop.engine.size", 20).getValue(); + + public static final CommonVars EC_MAINTAIN_TIME_STR = + CommonVars.apply("wds.linkis.ec.maintain.time.key", MaintainType.Default.toString()); + + public static final int EC_MAINTAIN_WORK_START_TIME = + CommonVars.apply("wds.linkis.ec.maintain.time.work.start.time", 8).getValue(); + + public static final int EC_MAINTAIN_WORK_END_TIME = + CommonVars.apply("wds.linkis.ec.maintain.time.work.end.time", 19).getValue(); + + public static final Boolean NODE_SELECT_HOTSPOT_EXCLUSION_RULE = + CommonVars.apply("linkis.node.select.hotspot.exclusion.rule.enable", true).getValue(); + + public static final CommonVars NODE_SELECT_HOTSPOT_EXCLUSION_SHUFFLE_RULER = + CommonVars.apply("linkis.node.select.hotspot.exclusion.shuffle.ruler", "size-limit"); + + 
public static final boolean EC_REUSE_WITH_RESOURCE_RULE_ENABLE = + CommonVars.apply("linkis.ec.reuse.with.resource.rule.enable", false).getValue(); + + public static final boolean EC_REUSE_WITH_TEMPLATE_RULE_ENABLE = + CommonVars.apply("linkis.ec.reuse.with.template.rule.enable", false).getValue(); + + public static final String EC_REUSE_WITH_RESOURCE_WITH_ECS = + CommonVars.apply("linkis.ec.reuse.with.resource.with.ecs", "spark,hive,shell,python") + .getValue(); + + public static final String SUPPORT_CLUSTER_RULE_EC_TYPES = + CommonVars.apply("linkis.support.cluster.rule.ec.types", "").getValue(); + + public static final boolean HIVE_CLUSTER_EC_EXECUTE_ONCE_RULE_ENABLE = + CommonVars.apply("linkis.hive.cluster.ec.execute.once.rule.enable", true).getValue(); + + public static final String LONG_LIVED_LABEL = + CommonVars.apply("linkis.label.node.long.lived.label.keys", "tenant|yarnCluster").getValue(); + + public static final String TMP_LIVED_LABEL = + CommonVars.apply("linkis.label.node.tmp.lived.label.keys", "taskId").getValue(); + + public static final boolean COMBINED_WITHOUT_YARN_DEFAULT = + CommonVars.apply("linkis.combined.without.yarn.default", true).getValue(); + + public static final Map AM_ENGINE_ASK_MAX_NUMBER = new HashMap<>(); + + static { + String keyValue = + CommonVars.apply("linkis.am.engine.ask.max.number", "appconn=5,trino=10").getValue(); + String[] keyValuePairs = keyValue.split(","); + for (String pair : keyValuePairs) { + String[] array = pair.split("="); + if (array.length != 2) { + throw new IllegalArgumentException( + "linkis.am.engine.ask.max.number value is illegal, value is " + pair); + } else { + AM_ENGINE_ASK_MAX_NUMBER.put(array[0], Integer.parseInt(array[1])); + } + } + } + + public static final boolean AM_ECM_RESET_RESOURCE = + CommonVars.apply("linkis.am.ecm.reset.resource.enable", true).getValue(); + + public static final boolean AM_USER_RESET_RESOURCE = + CommonVars.apply("linkis.am.user.reset.resource.enable", true).getValue(); 
+ + public static final CommonVars ENGINE_REUSE_ENABLE_CACHE = + CommonVars.apply("wds.linkis.manager.am.engine.reuse.enable.cache", false); + + public static final CommonVars ENGINE_REUSE_CACHE_EXPIRE_TIME = + CommonVars.apply("wds.linkis.manager.am.engine.reuse.cache.expire.time", new TimeType("5s")); + + public static final CommonVars ENGINE_REUSE_CACHE_MAX_SIZE = + CommonVars.apply("wds.linkis.manager.am.engine.reuse.cache.max.size", 1000L); + + public static final CommonVars ENGINE_REUSE_CACHE_SUPPORT_ENGINES = + CommonVars.apply("wds.linkis.manager.am.engine.reuse.cache.support.engines", "shell"); + public static final CommonVars ENGINE_REUSE_SHUFF_SUPPORT_ENGINES = + CommonVars.apply("wds.linkis.manager.am.engine.reuse.shuff.support.engines", "shell"); + + public static String getDefaultMultiEngineUser() { + String jvmUser = Utils.getJvmUser(); + return String.format( + "{jdbc:\"%s\", es: \"%s\", presto:\"%s\", appconn:\"%s\", openlookeng:\"%s\", trino:\"%s\", nebula:\"%s\",doris:\"%s\", hbase:\"%s\", jobserver:\"%s\",io_file:\"root\"}", + jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser); + } + + public static boolean isMultiUserEngine(String engineType) { + String[] multiUserEngine = AMConfiguration.MULTI_USER_ENGINE_TYPES.getValue().split(","); + Optional findResult = + Arrays.stream(multiUserEngine).filter(e -> e.equalsIgnoreCase(engineType)).findFirst(); + return findResult.isPresent(); + } + + public static boolean isAllowKilledEngineType(String engineType) { + String[] allowBatchKillEngine = + AMConfiguration.ALLOW_BATCH_KILL_ENGINE_TYPES.getValue().split(","); + Optional findResult = + Arrays.stream(allowBatchKillEngine).filter(e -> e.equalsIgnoreCase(engineType)).findFirst(); + return findResult.isPresent(); + } + + public static boolean isUnAllowKilledEngineType(String engineType) { + String[] unAllowBatchKillEngine = + AMConfiguration.UNALLOW_BATCH_KILL_ENGINE_TYPES.getValue().split(","); + Optional 
findResult = + Arrays.stream(unAllowBatchKillEngine) + .filter(e -> engineType.toLowerCase().contains(e)) + .findFirst(); + return findResult.isPresent(); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java new file mode 100644 index 00000000000..fa9843d9554 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.conf; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.governance.common.protocol.conf.RequestQueryEngineConfigWithGlobalConfig; +import org.apache.linkis.governance.common.protocol.conf.RequestQueryGlobalConfig; +import org.apache.linkis.governance.common.protocol.conf.ResponseQueryConfig; +import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; +import org.apache.linkis.protocol.CacheableProtocol; +import org.apache.linkis.rpc.RPCMapCache; + +import java.util.Map; + +import scala.Tuple2; + +public class ConfigurationMapCache { + + public static RPCMapCache globalMapCache = + new RPCMapCache( + Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME().getValue()) { + @Override + public CacheableProtocol createRequest(UserCreatorLabel userCreatorLabel) { + return new RequestQueryGlobalConfig(userCreatorLabel.getUser()); + } + + @Override + public Map createMap(Object any) { + if (any instanceof ResponseQueryConfig) { + return ((ResponseQueryConfig) any).getKeyAndValue(); + } + return null; + } + }; + + public static RPCMapCache, String, String> + engineMapCache = + new RPCMapCache, String, String>( + Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME().getValue()) { + @Override + public CacheableProtocol createRequest( + Tuple2 labelTuple) { + return new RequestQueryEngineConfigWithGlobalConfig( + labelTuple._1(), labelTuple._2(), null); + } + + @Override + public Map createMap(Object any) { + if (any instanceof ResponseQueryConfig) { + return ((ResponseQueryConfig) any).getKeyAndValue(); + } + return null; + } + }; +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java new file mode 100644 index 00000000000..1492c6569f4 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.conf; + +import org.apache.linkis.common.utils.LinkisUtils; +import org.apache.linkis.manager.label.entity.Label; +import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; + +import org.springframework.stereotype.Component; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import scala.Tuple2; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Component +class DefaultEngineConnConfigurationService implements EngineConnConfigurationService { + + private static final Logger logger = + LoggerFactory.getLogger(DefaultEngineConnConfigurationService.class); + + @Override + public Map getConsoleConfiguration(List> labelList) { + Map properties = new HashMap<>(); + + Optional userCreatorLabelOption = + labelList.stream() + .filter(l -> l instanceof UserCreatorLabel) + .map(l -> (UserCreatorLabel) l) + .findFirst(); + + Optional engineTypeLabelOption = + labelList.stream() + .filter(l -> l instanceof EngineTypeLabel) + .map(l -> (EngineTypeLabel) l) + .findFirst(); + userCreatorLabelOption.ifPresent( + userCreatorLabel -> { + engineTypeLabelOption.ifPresent( + engineTypeLabel -> { + Map engineConfig = + LinkisUtils.tryAndWarn( + () -> + ConfigurationMapCache.engineMapCache.getCacheMap( + new Tuple2(userCreatorLabel, engineTypeLabel)), + logger); + if (engineConfig != null) { + properties.putAll(engineConfig); + } + }); + }); + return properties; + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/EngineConnConfigurationService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/EngineConnConfigurationService.java new file mode 100644 index 00000000000..2d2d01d0573 --- /dev/null +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/EngineConnConfigurationService.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.conf; + +import org.apache.linkis.manager.label.entity.Label; + +import java.util.List; +import java.util.Map; + +public interface EngineConnConfigurationService { + + Map getConsoleConfiguration(List> labelList); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java new file mode 100644 index 00000000000..bce581a2e95 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.conf; + +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.conf.TimeType; + +public class ManagerMonitorConf { + + public static final CommonVars NODE_MAX_CREATE_TIME = + CommonVars.apply("wds.linkis.manager.am.node.create.time", new TimeType("12m")); + + public static final CommonVars NODE_HEARTBEAT_MAX_UPDATE_TIME = + CommonVars.apply("wds.linkis.manager.am.node.heartbeat", new TimeType("12m")); + + public static final CommonVars MANAGER_MONITOR_ASYNC_POLL_SIZE = + CommonVars.apply("wds.linkis.manager.monitor.async.poll.size", 5); + + public static final CommonVars MONITOR_SWITCH_ON = + CommonVars.apply("wds.linkis.manager.am.monitor.switch.on", true); + + public static final CommonVars ECM_HEARTBEAT_MAX_UPDATE_TIME = + CommonVars.apply("wds.linkis.manager.am.ecm.heartbeat", new TimeType("5m")); + + public static final CommonVars ACROSS_QUEUES_RESOURCE_SHOW_SWITCH_ON = + CommonVars.apply("wds.linkis.manager.across.resource.show.switch.on", false); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java new file mode 100644 index 
00000000000..132409c1545 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.converter; + +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; +import org.apache.linkis.manager.common.entity.metrics.NodeHealthyInfo; +import org.apache.linkis.manager.common.entity.metrics.NodeMetrics; +import org.apache.linkis.manager.common.entity.metrics.NodeOverLoadInfo; +import org.apache.linkis.manager.common.entity.metrics.NodeTaskInfo; +import org.apache.linkis.manager.common.entity.node.AMNode; +import org.apache.linkis.server.BDPJettyServerHelper; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.stereotype.Component; + +import java.io.IOException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Component +public class DefaultMetricsConverter implements MetricsConverter { + + private static final Logger logger = LoggerFactory.getLogger(DefaultMetricsConverter.class); + + @Override + public NodeTaskInfo parseTaskInfo(NodeMetrics nodeMetrics) { + String msg = nodeMetrics.getHeartBeatMsg(); + if (StringUtils.isNotBlank(msg)) { + try { + JsonNode jsonNode = BDPJettyServerHelper.jacksonJson().readTree(msg); + if (jsonNode != null && jsonNode.has("taskInfo")) { + NodeTaskInfo taskInfo = + BDPJettyServerHelper.jacksonJson() + .readValue(jsonNode.get("taskInfo").asText(), NodeTaskInfo.class); + return taskInfo; + } + } catch (IOException e) { + logger.warn("parse task info failed", e); + } + } + return null; + } + + @Override + public NodeHealthyInfo parseHealthyInfo(NodeMetrics nodeMetrics) { + String healthyInfo = nodeMetrics.getHealthy(); + if (StringUtils.isNotBlank(healthyInfo)) { + try { + return BDPJettyServerHelper.jacksonJson().readValue(healthyInfo, NodeHealthyInfo.class); + } catch (IOException e) { + logger.warn("parse healthy info failed", e); + } + } + return null; + } + + @Override + public NodeOverLoadInfo 
parseOverLoadInfo(NodeMetrics nodeMetrics) { + String overLoad = nodeMetrics.getOverLoad(); + if (StringUtils.isNotBlank(overLoad)) { + try { + return BDPJettyServerHelper.jacksonJson().readValue(overLoad, NodeOverLoadInfo.class); + } catch (IOException e) { + logger.warn("parse over load info failed", e); + } + } + return null; + } + + @Override + public NodeStatus parseStatus(NodeMetrics nodeMetrics) { + return NodeStatus.values()[nodeMetrics.getStatus()]; + } + + @Override + public String convertTaskInfo(NodeTaskInfo nodeTaskInfo) { + try { + return BDPJettyServerHelper.jacksonJson().writeValueAsString(nodeTaskInfo); + } catch (JsonProcessingException e) { + logger.warn("convert task info failed", e); + } + return null; + } + + @Override + public String convertHealthyInfo(NodeHealthyInfo nodeHealthyInfo) { + try { + return BDPJettyServerHelper.jacksonJson().writeValueAsString(nodeHealthyInfo); + } catch (JsonProcessingException e) { + logger.warn("convert healthy info failed", e); + } + return null; + } + + @Override + public String convertOverLoadInfo(NodeOverLoadInfo nodeOverLoadInfo) { + try { + return BDPJettyServerHelper.jacksonJson().writeValueAsString(nodeOverLoadInfo); + } catch (JsonProcessingException e) { + logger.warn("convert over load info failed", e); + } + return null; + } + + @Override + public int convertStatus(NodeStatus nodeStatus) { + return nodeStatus.ordinal(); + } + + @Override + public AMNode fillMetricsToNode(AMNode amNode, NodeMetrics metrics) { + if (metrics == null) { + return amNode; + } + amNode.setNodeStatus(parseStatus(metrics)); + amNode.setNodeTaskInfo(parseTaskInfo(metrics)); + amNode.setNodeHealthyInfo(parseHealthyInfo(metrics)); + amNode.setNodeOverLoadInfo(parseOverLoadInfo(metrics)); + amNode.setUpdateTime(metrics.getUpdateTime()); + amNode.setNodeDescription(metrics.getDescription()); + return amNode; + } +} diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/MetricsConverter.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/MetricsConverter.java new file mode 100644 index 00000000000..e84b577f456 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/MetricsConverter.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.converter; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.manager.common.entity.enumeration.NodeHealthy; +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; +import org.apache.linkis.manager.common.entity.metrics.*; +import org.apache.linkis.manager.common.entity.node.AMNode; + +public interface MetricsConverter { + NodeTaskInfo parseTaskInfo(NodeMetrics nodeMetrics); + + NodeHealthyInfo parseHealthyInfo(NodeMetrics nodeMetrics); + + NodeOverLoadInfo parseOverLoadInfo(NodeMetrics nodeMetrics); + + NodeStatus parseStatus(NodeMetrics nodeMetrics); + + String convertTaskInfo(NodeTaskInfo nodeTaskInfo); + + String convertHealthyInfo(NodeHealthyInfo nodeHealthyInfo); + + String convertOverLoadInfo(NodeOverLoadInfo nodeOverLoadInfo); + + int convertStatus(NodeStatus nodeStatus); + + AMNode fillMetricsToNode(AMNode amNode, NodeMetrics metrics); + + default NodeMetrics getInitMetric(ServiceInstance serviceInstance) { + AMNodeMetrics nodeMetrics = new AMNodeMetrics(); + nodeMetrics.setStatus(NodeStatus.Starting.ordinal()); + + NodeHealthyInfo nodeHealthyInfo = new NodeHealthyInfo(); + nodeHealthyInfo.setNodeHealthy(NodeHealthy.Healthy); + nodeMetrics.setHealthy(convertHealthyInfo(nodeHealthyInfo)); + nodeMetrics.setServiceInstance(serviceInstance); + return nodeMetrics; + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/event/message/EngineConnPidCallbackEvent.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/event/message/EngineConnPidCallbackEvent.java new file mode 100644 index 00000000000..1202433fb2e --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/event/message/EngineConnPidCallbackEvent.java @@ -0,0 +1,36 @@ +/* + * Licensed to the 
Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.event.message; + +import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid; + +import org.springframework.context.ApplicationEvent; + +public class EngineConnPidCallbackEvent extends ApplicationEvent { + + private ResponseEngineConnPid protocol; + + public EngineConnPidCallbackEvent(ResponseEngineConnPid protocol) { + super(protocol); + this.protocol = protocol; + } + + public ResponseEngineConnPid getProtocol() { + return protocol; + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java index 3734e3bdf61..c05768739c2 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java @@ -28,7 +28,11 @@ public enum AMErrorCode implements LinkisErrorCode { 
NOT_EXISTS_ENGINE_CONN(210003, "Not exists EngineConn(不存在的引擎)"), - AM_CONF_ERROR(210004, "AM configuration error(AM配置错误)"); + AM_CONF_ERROR(210004, "AM configuration error(AM配置错误)"), + + ASK_ENGINE_ERROR_RETRY(210005, "Ask engine error, retry(请求引擎失败,重试)"), + + EC_OPERATE_ERROR(210006, "Failed to execute operation(引擎操作失败)"); private final int errorCode; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java new file mode 100644 index 00000000000..13e45832c8f --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.label; + +import org.apache.linkis.manager.label.entity.Label; +import org.apache.linkis.manager.label.entity.em.EMInstanceLabel; +import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; + +import org.springframework.stereotype.Component; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +@Component +class AMLabelChecker implements LabelChecker { + + @Override + public boolean checkEngineLabel(List> labelList) { + return checkCorrespondingLabel(labelList, EngineTypeLabel.class, UserCreatorLabel.class); + } + + @Override + public boolean checkEMLabel(List> labelList) { + return checkCorrespondingLabel(labelList, EMInstanceLabel.class); + } + + @Override + public boolean checkCorrespondingLabel(List> labelList, Class... clazz) { + List> classes = Arrays.asList(clazz); + return labelList.stream() + .filter(Objects::nonNull) + .map(Label::getClass) + .collect(Collectors.toList()) + .containsAll(classes); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java new file mode 100644 index 00000000000..8820bc0be83 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.label; + +import org.apache.linkis.governance.common.conf.GovernanceCommonConf; +import org.apache.linkis.manager.label.entity.EMNodeLabel; +import org.apache.linkis.manager.label.entity.EngineNodeLabel; +import org.apache.linkis.manager.label.entity.Label; +import org.apache.linkis.manager.label.entity.node.AliasServiceInstanceLabel; +import org.apache.linkis.manager.service.common.label.LabelFilter; + +import org.springframework.stereotype.Component; + +import java.util.List; +import java.util.stream.Collectors; + +@Component +public class AMLabelFilter implements LabelFilter { + + @Override + public List> choseEngineLabel(List> labelList) { + return labelList.stream() + .filter( + label -> + label instanceof EngineNodeLabel + || (label instanceof AliasServiceInstanceLabel + && ((AliasServiceInstanceLabel) label) + .getAlias() + .equals(GovernanceCommonConf.ENGINE_CONN_SPRING_NAME().getValue()))) + .collect(Collectors.toList()); + } + + @Override + public List> choseEMLabel(List> labelList) { + return labelList.stream() + .filter( + label -> + label instanceof EMNodeLabel + || (label instanceof AliasServiceInstanceLabel + && ((AliasServiceInstanceLabel) label) + .getAlias() + .equals( + GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME().getValue()))) + .collect(Collectors.toList()); + } +} diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/DefaultManagerLabelService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/DefaultManagerLabelService.java new file mode 100644 index 00000000000..50c7e1ec6e9 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/DefaultManagerLabelService.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.label; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.manager.label.entity.Label; +import org.apache.linkis.manager.label.entity.em.EMInstanceLabel; +import org.apache.linkis.manager.label.entity.engine.EngineInstanceLabel; +import org.apache.linkis.manager.label.service.NodeLabelService; +import org.apache.linkis.manager.service.common.label.ManagerLabelService; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class DefaultManagerLabelService implements ManagerLabelService { + + private static final Logger logger = LoggerFactory.getLogger(DefaultManagerLabelService.class); + + @Autowired private NodeLabelService nodeLabelService; + + @Override + public boolean isEngine(ServiceInstance serviceInstance) { + List> labelList = nodeLabelService.getNodeLabels(serviceInstance); + return isEngine(labelList); + } + + @Override + public boolean isEM(ServiceInstance serviceInstance) { + List> labelList = nodeLabelService.getNodeLabels(serviceInstance); + boolean isEngine = labelList.stream().anyMatch(label -> label instanceof EngineInstanceLabel); + if (!isEngine) { + return labelList.stream().anyMatch(label -> label instanceof EMInstanceLabel); + } else { + return false; + } + } + + @Override + public boolean isEngine(List> labels) { + return labels.stream().anyMatch(label -> label instanceof EngineInstanceLabel); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/EngineReuseLabelChooser.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/EngineReuseLabelChooser.java new file mode 100644 index 00000000000..edbaf638fae --- /dev/null +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/EngineReuseLabelChooser.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.label; + +import org.apache.linkis.manager.label.entity.Label; + +import java.util.List; + +public interface EngineReuseLabelChooser { + + List> chooseLabels(List> labelList); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/LabelChecker.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/LabelChecker.java new file mode 100644 index 00000000000..0c1e27d0860 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/LabelChecker.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.label; + +import org.apache.linkis.manager.label.entity.Label; + +import java.util.List; + +public interface LabelChecker { + + boolean checkEngineLabel(List> labelList); + + boolean checkEMLabel(List> labelList); + + boolean checkCorrespondingLabel(List> labelList, Class... clazz); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java new file mode 100644 index 00000000000..b8ed766072f --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.label; + +import org.apache.linkis.manager.am.conf.AMConfiguration; +import org.apache.linkis.manager.am.exception.AMErrorCode; +import org.apache.linkis.manager.am.exception.AMErrorException; +import org.apache.linkis.manager.label.entity.Label; +import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; +import org.apache.linkis.server.BDPJettyServerHelper; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Stream; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Component +public class MultiUserEngineReuseLabelChooser implements EngineReuseLabelChooser { + private static final Logger logger = + LoggerFactory.getLogger(MultiUserEngineReuseLabelChooser.class); + + private final String[] multiUserEngine = + AMConfiguration.MULTI_USER_ENGINE_TYPES.getValue().split(","); + private final Map userMap = getMultiUserEngineUserMap(); + + private Map getMultiUserEngineUserMap() { + String userJson = AMConfiguration.MULTI_USER_ENGINE_USER.getValue(); + if (StringUtils.isNotBlank(userJson)) { + Map userMap = BDPJettyServerHelper.gson().fromJson(userJson, Map.class); + return userMap; + } else { + throw new AMErrorException( + AMErrorCode.AM_CONF_ERROR.getErrorCode(), + String.format( + "Multi-user engine parameter configuration error, please 
check key %s", + AMConfiguration.MULTI_USER_ENGINE_USER.key())); + } + } + + /** + * Filter out UserCreator Label that supports multi-user engine + * + * @param labelList + * @return + */ + @Override + public List> chooseLabels(List> labelList) { + List> labels = new ArrayList<>(labelList); + Optional engineTypeLabelOption = + labels.stream() + .filter(label -> label instanceof EngineTypeLabel) + .map(label -> (EngineTypeLabel) label) + .findFirst(); + if (engineTypeLabelOption.isPresent()) { + EngineTypeLabel engineTypeLabel = engineTypeLabelOption.get(); + Optional maybeString = + Stream.of(multiUserEngine) + .filter(engineTypeLabel.getEngineType()::equalsIgnoreCase) + .findFirst(); + Optional userCreatorLabelOption = + labels.stream() + .filter(label -> label instanceof UserCreatorLabel) + .map(label -> (UserCreatorLabel) label) + .findFirst(); + if (maybeString.isPresent() && userCreatorLabelOption.isPresent()) { + String userAdmin = userMap.get(engineTypeLabel.getEngineType()); + UserCreatorLabel userCreatorLabel = userCreatorLabelOption.get(); + logger.info( + String.format( + "For multi user engine to reset userCreatorLabel user %s to Admin %s", + userCreatorLabel.getUser(), userAdmin)); + userCreatorLabel.setUser(userAdmin); + return labels; + } + } + return labelList; + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java new file mode 100644 index 00000000000..fe93a034110 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.locker; + +import org.apache.linkis.manager.am.pointer.NodePointerBuilder; +import org.apache.linkis.manager.common.entity.node.AMEngineNode; +import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.manager.common.protocol.RequestEngineLock; +import org.apache.linkis.manager.common.protocol.RequestEngineUnlock; +import org.apache.linkis.manager.common.protocol.RequestManagerUnlock; +import org.apache.linkis.manager.common.protocol.engine.EngineLockType; +import org.apache.linkis.rpc.message.annotation.Receiver; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.Optional; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Component +public class DefaultEngineNodeLocker implements EngineNodeLocker { + private static final Logger logger = LoggerFactory.getLogger(DefaultEngineNodeLocker.class); + + @Autowired private NodePointerBuilder nodeBuilder; + + @Override + public Optional lockEngine(EngineNode engineNode, long timeout) { + return nodeBuilder + .buildEngineNodePointer(engineNode) + .lockEngine(new RequestEngineLock(timeout, EngineLockType.Timed)); + } + + @Override + public void releaseLock(EngineNode 
engineNode, String lock) { + nodeBuilder.buildEngineNodePointer(engineNode).releaseLock(new RequestEngineUnlock(lock)); + } + + @Receiver + public void releaseLock(RequestManagerUnlock requestManagerUnlock) { + try { + logger.info( + String.format( + "client%s Start to unlock engine %s", + requestManagerUnlock.clientInstance(), requestManagerUnlock.engineInstance())); + AMEngineNode engineNode = new AMEngineNode(); + engineNode.setServiceInstance(requestManagerUnlock.engineInstance()); + releaseLock(engineNode, requestManagerUnlock.lock()); + logger.info( + String.format( + "client%s Finished to unlock engine %s", + requestManagerUnlock.clientInstance(), requestManagerUnlock.engineInstance())); + } catch (Exception e) { + logger.error("release lock failed", e); + } + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/EngineNodeLocker.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/EngineNodeLocker.java new file mode 100644 index 00000000000..171714b3330 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/EngineNodeLocker.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.locker; + +import org.apache.linkis.manager.common.entity.node.EngineNode; + +import java.util.Optional; + +public interface EngineNodeLocker { + + Optional lockEngine(EngineNode engineNode, long timeout); + + void releaseLock(EngineNode engineNode, String lock); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java new file mode 100644 index 00000000000..691aa635a47 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java @@ -0,0 +1,218 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.manager; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.manager.am.converter.MetricsConverter; +import org.apache.linkis.manager.am.pointer.NodePointerBuilder; +import org.apache.linkis.manager.common.entity.metrics.NodeMetrics; +import org.apache.linkis.manager.common.entity.node.*; +import org.apache.linkis.manager.common.entity.persistence.PersistenceNodeEntity; +import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; +import org.apache.linkis.manager.common.protocol.em.ECMOperateResponse; +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; +import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest; +import org.apache.linkis.manager.exception.NodeInstanceDuplicateException; +import org.apache.linkis.manager.persistence.NodeManagerPersistence; +import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; +import org.apache.linkis.manager.rm.ResourceInfo; +import org.apache.linkis.manager.rm.service.ResourceManager; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.*; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Component +public class DefaultEMNodeManager implements EMNodeManager { + private static final Logger logger = LoggerFactory.getLogger(DefaultEMNodeManager.class); + + @Autowired private NodeManagerPersistence nodeManagerPersistence; + + @Autowired private NodeMetricManagerPersistence nodeMetricManagerPersistence; + + @Autowired private MetricsConverter metricsConverter; + + @Autowired private NodePointerBuilder nodePointerBuilder; + + @Autowired private ResourceManager resourceManager; + + @Override + public void emRegister(EMNode emNode) { + 
nodeManagerPersistence.addNodeInstance(emNode); + // init metric + nodeMetricManagerPersistence.addOrupdateNodeMetrics( + metricsConverter.getInitMetric(emNode.getServiceInstance())); + } + + @Override + public void addEMNodeInstance(EMNode emNode) { + try { + nodeManagerPersistence.addNodeInstance(emNode); + } catch (NodeInstanceDuplicateException e) { + logger.warn("em instance had exists, {}.", emNode); + nodeManagerPersistence.updateEngineNode(emNode.getServiceInstance(), emNode); + } + } + + @Override + public void initEMNodeMetrics(EMNode emNode) { + nodeMetricManagerPersistence.addOrupdateNodeMetrics( + metricsConverter.getInitMetric(emNode.getServiceInstance())); + } + + @Override + public List listEngines(EMNode emNode) { + List result = new ArrayList<>(); + List nodes = nodeManagerPersistence.getEngineNodeByEM(emNode.getServiceInstance()); + Map metricses = new HashMap<>(); + for (NodeMetrics m : nodeMetricManagerPersistence.getNodeMetrics(nodes)) { + metricses.put(m.getServiceInstance().toString(), m); + } + for (EngineNode node : nodes) { + NodeMetrics metrics = metricses.get(node.getServiceInstance().toString()); + if (metrics != null) { + metricsConverter.fillMetricsToNode(node, metrics); + } + result.add(node); + } + return result; + } + + @Override + public List listUserEngines(EMNode emNode, String user) { + List result = new ArrayList<>(); + for (EngineNode node : listEngines(emNode)) { + if (node.getOwner().equals(user)) { + result.add(node); + } + } + return result; + } + + @Override + public List listUserNodes(String user) { + return nodeManagerPersistence.getNodes(user); + } + + /** + * Get detailed em information from the persistence TODO add label to node ? 
+ * + * @param scoreServiceInstances + * @return + */ + @Override + public EMNode[] getEMNodes(ScoreServiceInstance[] scoreServiceInstances) { + if (null == scoreServiceInstances || scoreServiceInstances.length == 0) { + return null; + } + EMNode[] emNodes = + Arrays.stream(scoreServiceInstances) + .map( + scoreServiceInstance -> { + AMEMNode emNode = new AMEMNode(); + emNode.setScore(scoreServiceInstance.getScore()); + emNode.setServiceInstance(scoreServiceInstance.getServiceInstance()); + return emNode; + }) + .toArray(EMNode[]::new); + + // 1. add nodeMetrics 2 add RM info + ResourceInfo resourceInfo = + resourceManager.getResourceInfo( + Arrays.stream(scoreServiceInstances) + .map(ScoreServiceInstance::getServiceInstance) + .toArray(ServiceInstance[]::new)); + List nodeMetrics = + nodeMetricManagerPersistence.getNodeMetrics(Arrays.asList(emNodes)); + + for (EMNode emNode : emNodes) { + Optional optionMetrics = + nodeMetrics.stream() + .filter(metrics -> metrics.getServiceInstance().equals(emNode.getServiceInstance())) + .findFirst(); + Optional optionRMNode = + resourceInfo.resourceInfo().stream() + .filter(rmNode -> rmNode.getServiceInstance().equals(emNode.getServiceInstance())) + .findFirst(); + + optionMetrics.ifPresent(metrics -> metricsConverter.fillMetricsToNode(emNode, metrics)); + optionRMNode.ifPresent(rmNode -> emNode.setNodeResource(rmNode.getNodeResource())); + } + return emNodes; + } + + @Override + public EMNode getEM(ServiceInstance serviceInstance) { + Node node = nodeManagerPersistence.getNode(serviceInstance); + if (null == node) { + logger.info("This em of " + serviceInstance + " not exists in db"); + return null; + } + AMEMNode emNode = new AMEMNode(); + emNode.setOwner(node.getOwner()); + emNode.setServiceInstance(node.getServiceInstance()); + if (node instanceof PersistenceNodeEntity) { + emNode.setStartTime(node.getStartTime()); + } + emNode.setMark(emNode.getMark()); + metricsConverter.fillMetricsToNode(emNode, 
nodeMetricManagerPersistence.getNodeMetrics(emNode)); + return emNode; + } + + @Override + public void stopEM(EMNode emNode) { + nodePointerBuilder.buildEMNodePointer(emNode).stopNode(); + } + + @Override + public void deleteEM(EMNode emNode) { + nodeManagerPersistence.removeNodeInstance(emNode); + logger.info("Finished to clear emNode instance(" + emNode.getServiceInstance() + ") info "); + nodeMetricManagerPersistence.deleteNodeMetrics(emNode); + logger.info("Finished to clear emNode(" + emNode.getServiceInstance() + ") metrics info"); + } + + @Override + public void pauseEM(ServiceInstance serviceInstance) {} + + /** + * 1. request engineManager to launch engine + * + * @param engineConnLaunchRequest + * @param emNode + * @return + */ + @Override + public EngineNode createEngine(EngineConnLaunchRequest engineConnLaunchRequest, EMNode emNode) { + return nodePointerBuilder.buildEMNodePointer(emNode).createEngine(engineConnLaunchRequest); + } + + @Override + public void stopEngine(EngineStopRequest engineStopRequest, EMNode emNode) { + nodePointerBuilder.buildEMNodePointer(emNode).stopEngine(engineStopRequest); + } + + @Override + public ECMOperateResponse executeOperation(EMNode ecmNode, ECMOperateRequest request) { + return nodePointerBuilder.buildEMNodePointer(ecmNode).executeOperation(request); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java new file mode 100644 index 00000000000..e3e9689b4c3 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java @@ -0,0 +1,391 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.manager; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.common.exception.LinkisRetryException; +import org.apache.linkis.manager.am.conf.AMConfiguration; +import org.apache.linkis.manager.am.converter.MetricsConverter; +import org.apache.linkis.manager.am.exception.AMErrorCode; +import org.apache.linkis.manager.am.exception.AMErrorException; +import org.apache.linkis.manager.am.locker.EngineNodeLocker; +import org.apache.linkis.manager.am.pointer.EngineNodePointer; +import org.apache.linkis.manager.am.pointer.NodePointerBuilder; +import org.apache.linkis.manager.common.constant.AMConstant; +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; +import org.apache.linkis.manager.common.entity.metrics.NodeMetrics; +import org.apache.linkis.manager.common.entity.node.*; +import org.apache.linkis.manager.common.entity.persistence.PersistenceLabel; +import org.apache.linkis.manager.common.entity.persistence.PersistenceNode; +import org.apache.linkis.manager.common.protocol.engine.EngineOperateRequest; +import org.apache.linkis.manager.common.protocol.engine.EngineOperateResponse; +import org.apache.linkis.manager.common.protocol.node.NodeHeartbeatMsg; +import 
org.apache.linkis.manager.dao.NodeManagerMapper; +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactory; +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext; +import org.apache.linkis.manager.label.entity.engine.EngineInstanceLabel; +import org.apache.linkis.manager.persistence.LabelManagerPersistence; +import org.apache.linkis.manager.persistence.NodeManagerPersistence; +import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; +import org.apache.linkis.manager.rm.ResourceInfo; +import org.apache.linkis.manager.rm.service.ResourceManager; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.retry.annotation.Backoff; +import org.springframework.retry.annotation.Retryable; +import org.springframework.stereotype.Service; + +import java.lang.reflect.UndeclaredThrowableException; +import java.util.*; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class DefaultEngineNodeManager implements EngineNodeManager { + private static final Logger logger = LoggerFactory.getLogger(DefaultEngineNodeManager.class); + + @Autowired private EngineNodeLocker engineLocker; + + @Autowired private NodeManagerPersistence nodeManagerPersistence; + + @Autowired private NodeMetricManagerPersistence nodeMetricManagerPersistence; + + @Autowired private MetricsConverter metricsConverter; + + @Autowired private NodePointerBuilder nodePointerBuilder; + + @Autowired private ResourceManager resourceManager; + + @Autowired private NodeManagerMapper nodeManagerMapper; + + @Autowired private LabelManagerPersistence labelManagerPersistence; + + private final LabelBuilderFactory labelBuilderFactory = + LabelBuilderFactoryContext.getLabelBuilderFactory(); + + @Override + public List listEngines(String user) { + List userNodes = nodeManagerPersistence.getNodes(user); + + List nodes 
= + userNodes.stream() + .map(Node::getServiceInstance) + .map(nodeManagerPersistence::getEngineNode) + .collect(Collectors.toList()); + + List nodeMetrics = nodeMetricManagerPersistence.getNodeMetrics(nodes); + Map metricses = + nodeMetrics.stream() + .collect( + Collectors.toMap( + m -> m.getServiceInstance().toString(), + m -> m, + (existingValue, newValue) -> newValue)); + + nodes.forEach( + node -> { + Optional nodeMetricsOptional = + Optional.ofNullable(metricses.get(node.getServiceInstance().toString())); + nodeMetricsOptional.ifPresent(m -> metricsConverter.fillMetricsToNode(node, m)); + }); + return nodes; + } + + @Retryable( + value = {feign.RetryableException.class, UndeclaredThrowableException.class}, + maxAttempts = 5, + backoff = @Backoff(delay = 10000)) + @Override + public EngineNode getEngineNodeInfo(EngineNode engineNode) { + EngineNodePointer engine = nodePointerBuilder.buildEngineNodePointer(engineNode); + NodeHeartbeatMsg heartMsg = engine.getNodeHeartbeatMsg(); + engineNode.setNodeHealthyInfo(heartMsg.getHealthyInfo()); + engineNode.setNodeOverLoadInfo(heartMsg.getOverLoadInfo()); + engineNode.setNodeStatus(heartMsg.getStatus()); + return engineNode; + } + + @Override + public EngineNode getEngineNodeInfoByDB(EngineNode engineNode) { + EngineNode dbEngineNode = nodeManagerPersistence.getEngineNode(engineNode.getServiceInstance()); + if (null == dbEngineNode) { + throw new LinkisRetryException( + AMConstant.ENGINE_ERROR_CODE, engineNode + " not exists in db"); + } + metricsConverter.fillMetricsToNode( + dbEngineNode, nodeMetricManagerPersistence.getNodeMetrics(dbEngineNode)); + return dbEngineNode; + } + + @Override + public EngineNode getEngineNodeInfoByTicketId(String ticketId) { + EngineNode dbEngineNode = nodeManagerPersistence.getEngineNodeByTicketId(ticketId); + if (null == dbEngineNode) { + throw new LinkisRetryException(AMConstant.ENGINE_ERROR_CODE, ticketId + " not exists in db"); + } + metricsConverter.fillMetricsToNode( + 
dbEngineNode, nodeMetricManagerPersistence.getNodeMetrics(dbEngineNode)); + return dbEngineNode; + } + + @Override + public void updateEngineStatus( + ServiceInstance serviceInstance, NodeStatus fromState, NodeStatus toState) {} + + @Override + public void updateEngine(EngineNode engineNode) { + nodeManagerPersistence.updateNodeInstance(engineNode); + } + + @Override + public EngineNode switchEngine(EngineNode engineNode) { + return null; + } + + @Override + public EngineNode reuseEngine(EngineNode engineNode) { + EngineNode node = getEngineNodeInfo(engineNode); + if (node == null || !NodeStatus.isAvailable(node.getNodeStatus())) { + return null; + } + if (!NodeStatus.isLocked(node.getNodeStatus())) { + Optional lockStr = + engineLocker.lockEngine(node, (long) AMConfiguration.ENGINE_LOCKER_MAX_TIME.getValue()); + if (!lockStr.isPresent()) { + throw new LinkisRetryException( + AMConstant.ENGINE_ERROR_CODE, + String.format( + "Failed to request lock from engine by reuse %s", node.getServiceInstance())); + } + node.setLock(lockStr.get()); + return node; + } else { + return null; + } + } + + /** + * TODO use Engine需要考虑流式引擎的场景,后续需要通过Label加额外的处理 + * + * @param engineNode + * @param timeout + * @return + */ + @Override + public EngineNode useEngine(EngineNode engineNode, long timeout) { + // wait until engine to be available + EngineNode node = getEngineNodeInfo(engineNode); + if (node == null || !NodeStatus.isAvailable(node.getNodeStatus())) { + return null; + } + if (!NodeStatus.isLocked(node.getNodeStatus())) { + Optional lockStr = engineLocker.lockEngine(node, timeout); + if (!lockStr.isPresent()) { + throw new LinkisRetryException( + AMConstant.ENGINE_ERROR_CODE, + String.format("Failed to request lock from engine %s", node.getServiceInstance())); + } + node.setLock(lockStr.get()); + return node; + } else { + return null; + } + } + + @Override + public EngineNode useEngine(EngineNode engineNode) { + return useEngine(engineNode, 
AMConfiguration.ENGINE_LOCKER_MAX_TIME.getValue()); + } + + /** + * Get detailed engine information from the persistence + * + * @param scoreServiceInstances + * @return + */ + @Override + public EngineNode[] getEngineNodes(ScoreServiceInstance[] scoreServiceInstances) { + if (scoreServiceInstances == null || scoreServiceInstances.length == 0) { + return null; + } + List instances = new ArrayList(); + List scoreServiceInstancesList = Arrays.asList(scoreServiceInstances); + EngineNode[] engineNodes = + scoreServiceInstancesList.stream() + .map( + scoreServiceInstance -> { + AMEngineNode engineNode = new AMEngineNode(); + engineNode.setScore(scoreServiceInstance.getScore()); + engineNode.setServiceInstance(scoreServiceInstance.getServiceInstance()); + instances.add(scoreServiceInstance.getServiceInstance().getInstance()); + return engineNode; + }) + .toArray(EngineNode[]::new); + + List serviceInstancesList = + scoreServiceInstancesList.stream() + .map(ScoreServiceInstance::getServiceInstance) + .collect(Collectors.toList()); + + try { + logger.info("start getEngineNodes."); + ResourceInfo resourceInfo = + resourceManager.getResourceInfo(serviceInstancesList.toArray(new ServiceInstance[0])); + logger.info("end resourceInfo {}", resourceInfo); + if (serviceInstancesList.isEmpty()) { + throw new LinkisRetryException( + AMConstant.ENGINE_ERROR_CODE, "Service instances cannot be empty."); + } + + List nodeMetrics = + nodeMetricManagerPersistence.getNodeMetrics(Arrays.asList(engineNodes)); + logger.info( + "get nodeMetrics, with engineNode size: {}, res size: {}", + engineNodes.length, + nodeMetrics.size()); + List persistenceNodes = nodeManagerMapper.getNodesByInstances(instances); + logger.info( + "get persistenceNodes, with instance size: {}, res size: {}", + instances.size(), + persistenceNodes.size()); + + for (EngineNode engineNode : engineNodes) { + Optional optionMetrics = + nodeMetrics.stream() + .filter( + nodeMetric -> + 
nodeMetric.getServiceInstance().equals(engineNode.getServiceInstance())) + .findFirst(); + + Optional optionRMNode = + resourceInfo.resourceInfo().stream() + .filter( + resourceNode -> + resourceNode.getServiceInstance().equals(engineNode.getServiceInstance())) + .findFirst(); + + optionMetrics.ifPresent(metrics -> metricsConverter.fillMetricsToNode(engineNode, metrics)); + optionRMNode.ifPresent(rmNode -> engineNode.setNodeResource(rmNode.getNodeResource())); + + persistenceNodes.stream() + .filter( + node -> node.getInstance().equals(engineNode.getServiceInstance().getInstance())) + .findFirst() + .ifPresent(persistenceNode -> engineNode.setParams(persistenceNode.getParams())); + } + } catch (Exception e) { + LinkisRetryException linkisRetryException = + new LinkisRetryException(AMConstant.ENGINE_ERROR_CODE, "Failed to process data."); + linkisRetryException.initCause(e); + throw linkisRetryException; + } + logger.info("end getEngineNodes"); + return engineNodes; + } + + /** + * add info to persistence + * + * @param engineNode + */ + @Override + public void addEngineNode(EngineNode engineNode) { + nodeManagerPersistence.addEngineNode(engineNode); + // init metric + nodeMetricManagerPersistence.addOrupdateNodeMetrics( + metricsConverter.getInitMetric(engineNode.getServiceInstance())); + } + + /** + * delete info to persistence + * + * @param engineNode + */ + @Override + public void deleteEngineNode(EngineNode engineNode) { + nodeManagerPersistence.deleteEngineNode(engineNode); + } + + @Override + public EngineNode getEngineNode(ServiceInstance serviceInstance) { + return nodeManagerPersistence.getEngineNode(serviceInstance); + } + + /** + * 1.serviceInstance中取出instance(实际是ticketId) 2.update serviceInstance 表,包括 + * instance替换,替换mark,owner,updator,creator的空值,更新updateTime 3.update engine_em关联表 4.update label + * ticket_id ==> instance + * + * @param serviceInstance + * @param engineNode + */ + @Override + public void updateEngineNode(ServiceInstance 
serviceInstance, EngineNode engineNode) { + nodeManagerPersistence.updateEngineNode(serviceInstance, engineNode); + nodeMetricManagerPersistence.deleteNodeMetrics(engineNode); + + EngineInstanceLabel engineLabel = labelBuilderFactory.createLabel(EngineInstanceLabel.class); + engineLabel.setInstance(engineNode.getServiceInstance().getInstance()); + engineLabel.setServiceName(engineNode.getServiceInstance().getApplicationName()); + + EngineInstanceLabel oldEngineLabel = labelBuilderFactory.createLabel(EngineInstanceLabel.class); + oldEngineLabel.setInstance(serviceInstance.getInstance()); + oldEngineLabel.setServiceName(engineNode.getServiceInstance().getApplicationName()); + PersistenceLabel oldPersistenceLabel = + labelBuilderFactory.convertLabel(oldEngineLabel, PersistenceLabel.class); + PersistenceLabel label = + labelManagerPersistence.getLabelByKeyValue( + oldPersistenceLabel.getLabelKey(), oldPersistenceLabel.getStringValue()); + + PersistenceLabel persistenceLabel = + labelBuilderFactory.convertLabel(engineLabel, PersistenceLabel.class); + persistenceLabel.setLabelValueSize(persistenceLabel.getValue().size()); + labelManagerPersistence.updateLabel(label.getId(), persistenceLabel); + } + + public EngineOperateResponse executeOperation( + EngineNode engineNode, EngineOperateRequest request) { + EngineNodePointer engine = nodePointerBuilder.buildEngineNodePointer(engineNode); + return engine.executeOperation(request); + } + + public EngineNode getEngineNodeInfo(ServiceInstance serviceInstance) { + EngineNode engineNode = getEngineNode(serviceInstance); + if (Objects.isNull(engineNode)) { + throw new AMErrorException( + AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorCode(), + AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorDesc()); + } + NodeMetrics nodeMetric = nodeMetricManagerPersistence.getNodeMetrics(engineNode); + if (engineNode.getNodeStatus() == null) { + if (null != nodeMetric && null != nodeMetric.getStatus()) { + 
engineNode.setNodeStatus(NodeStatus.values()[nodeMetric.getStatus()]); + } else { + engineNode.setNodeStatus(NodeStatus.Starting); + } + if (null != nodeMetric && StringUtils.isNotBlank(nodeMetric.getHeartBeatMsg())) { + engineNode.setEcMetrics(nodeMetric.getHeartBeatMsg()); + } + } + return engineNode; + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java new file mode 100644 index 00000000000..cf540036a3f --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.manager; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.manager.common.entity.node.EMNode; +import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.manager.common.entity.node.Node; +import org.apache.linkis.manager.common.entity.node.ScoreServiceInstance; +import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; +import org.apache.linkis.manager.common.protocol.em.ECMOperateResponse; +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; +import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest; + +import java.util.List; + +public interface EMNodeManager { + + void emRegister(EMNode emNode); + + List listEngines(EMNode emNode); + + List listUserEngines(EMNode emNode, String user); + + List listUserNodes(String user); + + /** + * Get detailed em information from the persistence + * + * @param scoreServiceInstances + * @return + */ + EMNode[] getEMNodes(ScoreServiceInstance[] scoreServiceInstances); + + EMNode getEM(ServiceInstance serviceInstance); + + void stopEM(EMNode emNode); + + void deleteEM(EMNode emNode); + + void pauseEM(ServiceInstance serviceInstance); + + /** + * 1. request engineManager to launch engine 2. 
persist engine info + * + * @param engineConnLaunchRequest engine launch request + * @param emNode ecm node + * @return engine node + */ + EngineNode createEngine(EngineConnLaunchRequest engineConnLaunchRequest, EMNode emNode); + + void stopEngine(EngineStopRequest engineStopRequest, EMNode emNode); + + void addEMNodeInstance(EMNode emNode); + + void initEMNodeMetrics(EMNode emNode); + + ECMOperateResponse executeOperation(EMNode ecmNode, ECMOperateRequest request); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java new file mode 100644 index 00000000000..7c3f64efee5 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.manager; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; +import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.manager.common.entity.node.ScoreServiceInstance; +import org.apache.linkis.manager.common.protocol.engine.EngineOperateRequest; +import org.apache.linkis.manager.common.protocol.engine.EngineOperateResponse; + +import java.util.List; + +public interface EngineNodeManager { + + List listEngines(String user); + + EngineNode getEngineNode(ServiceInstance serviceInstance); + + EngineNode getEngineNodeInfo(EngineNode engineNode); + + EngineNode getEngineNodeInfo(ServiceInstance serviceInstance); + + EngineNode getEngineNodeInfoByDB(EngineNode engineNode); + + EngineNode getEngineNodeInfoByTicketId(String ticketId); + + /** + * Get detailed engine information from the persistence + * + * @param scoreServiceInstances + * @return + */ + EngineNode[] getEngineNodes(ScoreServiceInstance[] scoreServiceInstances); + + void updateEngineStatus( + ServiceInstance serviceInstance, NodeStatus fromState, NodeStatus toState); + + void addEngineNode(EngineNode engineNode); + + void updateEngineNode(ServiceInstance serviceInstance, EngineNode engineNode); + + void updateEngine(EngineNode engineNode); + + void deleteEngineNode(EngineNode engineNode); + + EngineNode switchEngine(EngineNode engineNode); + + EngineNode reuseEngine(EngineNode engineNode); + + EngineNode useEngine(EngineNode engineNode, long timeout); + + EngineNode useEngine(EngineNode engineNode); + + EngineOperateResponse executeOperation(EngineNode engineNode, EngineOperateRequest request); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java new file mode 100644 index 00000000000..1c6f8100339 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.pointer; + +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; +import org.apache.linkis.manager.common.entity.node.Node; +import org.apache.linkis.manager.common.protocol.node.*; +import org.apache.linkis.manager.label.entity.Label; +import org.apache.linkis.rpc.Sender; + +public abstract class AbstractNodePointer implements NodePointer { + + protected Sender getSender() { + return Sender.getSender(getNode().getServiceInstance()); + } + + /** + * 向对应的Node发送请求获取节点状态 + * + * @return + */ + @Override + public NodeStatus getNodeStatus() { + Sender sender = getSender(); + ResponseNodeStatus responseStatus = (ResponseNodeStatus) sender.ask(new RequestNodeStatus()); + return responseStatus.getNodeStatus(); + } + + /** + * 向对应的Node发送请求获取节点心跳信息 + * + * @return + */ + @Override + public NodeHeartbeatMsg getNodeHeartbeatMsg() { + Sender sender = getSender(); + NodeHeartbeatMsg heartbeatMsg = (NodeHeartbeatMsg) sender.ask(new NodeHeartbeatRequest()); + return heartbeatMsg; + } + + /** + * 向对应的Node发送Kill 请求 + * + * @return + */ + @Override + public void stopNode() { + Sender sender = getSender(); + sender.send(new StopNodeRequest()); + } + + /** + * 向对应的Node Label 更新请求 + * + * @return + */ + @Override + public void updateLabels(Label[] labels) {} + + @Override + public void updateNodeHealthyRequest(NodeHealthyRequest nodeHealthyRequest) { + getSender().send(nodeHealthyRequest); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof Node) { + Node nodeB = (Node) obj; + return getNode().getServiceInstance().equals(nodeB.getServiceInstance()); + } + return false; + } + + @Override + public int hashCode() { + return getNode().getServiceInstance().hashCode(); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java new file mode 100644 index 00000000000..07097fcb0ba --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java @@ -0,0 +1,121 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.pointer; + +import org.apache.linkis.common.exception.LinkisRetryException; +import org.apache.linkis.manager.am.exception.AMErrorException; +import org.apache.linkis.manager.common.constant.AMConstant; +import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.manager.common.entity.node.Node; +import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; +import org.apache.linkis.manager.common.protocol.em.ECMOperateResponse; +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; +import org.apache.linkis.manager.common.protocol.engine.EngineStopResponse; +import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest; +import org.apache.linkis.server.BDPJettyServerHelper; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DefaultEMNodPointer extends AbstractNodePointer implements EMNodPointer { + private static final Logger logger = LoggerFactory.getLogger(DefaultEMNodPointer.class); + + private Node node; + + public DefaultEMNodPointer(Node node) { + this.node = node; + } + + /** + * 与该远程指针关联的node信息 + * + * @return + */ + @Override + public Node getNode() { + return node; + } + + @Override + public EngineNode createEngine(EngineConnLaunchRequest engineConnLaunchRequest) { + logger.info("Start to createEngine ask em " + getNode().getServiceInstance()); + Object result = getSender().ask(engineConnLaunchRequest); + if (result instanceof EngineNode) { + EngineNode engineNode = (EngineNode) result; + logger.info( + "Succeed to createEngine ask em " + + getNode().getServiceInstance() + + ", engineNode " + + engineNode); + return engineNode; + } else { + throw new LinkisRetryException( + AMConstant.ENGINE_ERROR_CODE, + "Failed to createEngine ask em " + getNode().getServiceInstance() + "result: " + result); + } + } + + @Override + public void stopEngine(EngineStopRequest engineStopRequest) { + try { + Object 
result = getSender().ask(engineStopRequest); + if (result instanceof EngineStopResponse) { + EngineStopResponse engineStopResponse = (EngineStopResponse) result; + if (!engineStopResponse.getStopStatus()) { + logger.info( + "Kill engine : " + + engineStopRequest.getServiceInstance().toString() + + " failed, because " + + engineStopResponse.getMsg() + + " . Will ask engine to suicide."); + } else { + logger.info( + "Succeed to kill engine " + engineStopRequest.getServiceInstance().toString() + "."); + } + } else { + logger.warn( + "Ask em : " + + getNode().getServiceInstance().toString() + + " to kill engine : " + + engineStopRequest.getServiceInstance().toString() + + " failed, response is : " + + BDPJettyServerHelper.gson().toJson(result) + + "."); + } + } catch (Exception e) { + logger.warn( + "Ask em : " + + getNode().getServiceInstance().toString() + + " to kill engine : " + + engineStopRequest.getServiceInstance().toString() + + " failed, exception is : " + + e.getMessage() + + "."); + } + } + + @Override + public ECMOperateResponse executeOperation(ECMOperateRequest ecmOperateRequest) { + Object result = getSender().ask(ecmOperateRequest); + if (result instanceof ECMOperateResponse) { + return (ECMOperateResponse) result; + } else { + throw new AMErrorException(AMConstant.ENGINE_ERROR_CODE, "Failed to execute ECM operation."); + } + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java new file mode 100644 index 00000000000..cdbbcbbf09b --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or 
more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.pointer; + +import org.apache.linkis.manager.am.exception.AMErrorException; +import org.apache.linkis.manager.common.entity.node.Node; +import org.apache.linkis.manager.common.protocol.RequestEngineLock; +import org.apache.linkis.manager.common.protocol.RequestEngineUnlock; +import org.apache.linkis.manager.common.protocol.ResponseEngineLock; +import org.apache.linkis.manager.common.protocol.engine.EngineOperateRequest; +import org.apache.linkis.manager.common.protocol.engine.EngineOperateResponse; + +import java.util.Optional; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DefaultEngineNodPointer extends AbstractNodePointer implements EngineNodePointer { + private static final Logger logger = LoggerFactory.getLogger(DefaultEngineNodPointer.class); + + private Node node; + + public DefaultEngineNodPointer(Node node) { + this.node = node; + } + + @Override + public Node getNode() { + return node; + } + + @Override + public Optional lockEngine(RequestEngineLock requestEngineLock) { + Object result = getSender().ask(requestEngineLock); + if (result instanceof ResponseEngineLock) { + ResponseEngineLock responseEngineLock = (ResponseEngineLock) result; + if 
(responseEngineLock.lockStatus()) { + return Optional.of(responseEngineLock.lock()); + } else { + logger.info( + "Failed to get locker," + node.getServiceInstance() + ": " + responseEngineLock.msg()); + return Optional.empty(); + } + } else { + return Optional.empty(); + } + } + + @Override + public void releaseLock(RequestEngineUnlock requestEngineUnlock) { + getSender().send(requestEngineUnlock); + } + + @Override + public EngineOperateResponse executeOperation(EngineOperateRequest engineOperateRequest) { + Object result = getSender().ask(engineOperateRequest); + if (result instanceof EngineOperateResponse) { + return (EngineOperateResponse) result; + } else { + throw new AMErrorException(-1, "Illegal response of operation."); + } + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java new file mode 100644 index 00000000000..58ec1ae940e --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.pointer; + +import org.apache.linkis.manager.common.entity.node.EMNode; +import org.apache.linkis.manager.common.entity.node.EngineNode; + +import org.springframework.stereotype.Component; + +@Component +public class DefaultNodePointerBuilder implements NodePointerBuilder { + @Override + public EMNodPointer buildEMNodePointer(EMNode node) { + return new DefaultEMNodPointer(node); + } + + @Override + public EngineNodePointer buildEngineNodePointer(EngineNode node) { + return new DefaultEngineNodPointer(node); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EMNodPointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EMNodPointer.java new file mode 100644 index 00000000000..a85bf6eaca9 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EMNodPointer.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.pointer; + +import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; +import org.apache.linkis.manager.common.protocol.em.ECMOperateResponse; +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; +import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest; + +public interface EMNodPointer extends NodePointer { + + EngineNode createEngine(EngineConnLaunchRequest engineConnLaunchRequest); + + void stopEngine(EngineStopRequest engineStopRequest); + + ECMOperateResponse executeOperation(ECMOperateRequest request); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EngineNodePointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EngineNodePointer.java new file mode 100644 index 00000000000..8be00a09c22 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EngineNodePointer.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.pointer; + +import org.apache.linkis.manager.common.protocol.RequestEngineLock; +import org.apache.linkis.manager.common.protocol.RequestEngineUnlock; +import org.apache.linkis.manager.common.protocol.engine.EngineOperateRequest; +import org.apache.linkis.manager.common.protocol.engine.EngineOperateResponse; + +import java.util.Optional; + +public interface EngineNodePointer extends NodePointer { + + Optional lockEngine(RequestEngineLock requestEngineLock); + + void releaseLock(RequestEngineUnlock requestEngineUnlock); + + EngineOperateResponse executeOperation(EngineOperateRequest engineOperateRequest); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointer.java new file mode 100644 index 00000000000..e5d519873ee --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointer.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.pointer; + +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; +import org.apache.linkis.manager.common.entity.node.Node; +import org.apache.linkis.manager.common.protocol.node.NodeHealthyRequest; +import org.apache.linkis.manager.common.protocol.node.NodeHeartbeatMsg; +import org.apache.linkis.manager.label.entity.Label; + +public interface NodePointer { + + /** + * 与该远程指针关联的node信息 + * + * @return + */ + Node getNode(); + + /** + * 向对应的Node发送请求获取节点状态 + * + * @return + */ + NodeStatus getNodeStatus(); + + /** + * 向对应的Node发送请求获取节点心跳信息 + * + * @return + */ + NodeHeartbeatMsg getNodeHeartbeatMsg(); + + /** + * 向对应的Node发送Kill 请求 + * + * @return + */ + void stopNode(); + + /** + * 向对应的Node发送Label更新 请求 + * + * @return + */ + void updateLabels(Label[] labels); + + void updateNodeHealthyRequest(NodeHealthyRequest nodeHealthyRequest); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointerBuilder.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointerBuilder.java new file mode 100644 index 00000000000..649e1423990 --- /dev/null +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointerBuilder.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.pointer; + +import org.apache.linkis.manager.common.entity.node.EMNode; +import org.apache.linkis.manager.common.entity.node.EngineNode; + +public interface NodePointerBuilder { + + EMNodPointer buildEMNodePointer(EMNode node); + + EngineNodePointer buildEngineNodePointer(EngineNode node); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/recycle/AssignNodeRuleExecutor.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/recycle/AssignNodeRuleExecutor.java new file mode 100644 index 00000000000..f9a3acce4f2 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/recycle/AssignNodeRuleExecutor.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.recycle; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.manager.common.entity.recycle.AssignNodeRule; +import org.apache.linkis.manager.common.entity.recycle.RecyclingRule; + +import org.springframework.stereotype.Component; + +@Component +public class AssignNodeRuleExecutor implements RecyclingRuleExecutor { + @Override + public boolean ifAccept(RecyclingRule recyclingRule) { + return recyclingRule instanceof AssignNodeRule; + } + + @Override + public ServiceInstance[] executeRule(RecyclingRule recyclingRule) { + if (recyclingRule instanceof AssignNodeRule) { + AssignNodeRule assignNodeRule = (AssignNodeRule) recyclingRule; + return new ServiceInstance[] {assignNodeRule.serviceInstance()}; + } else { + return null; + } + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/recycle/RecyclingRuleExecutor.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/recycle/RecyclingRuleExecutor.java new file mode 100644 index 00000000000..ab53aa82812 --- /dev/null +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/recycle/RecyclingRuleExecutor.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.recycle; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.manager.common.entity.recycle.RecyclingRule; + +public interface RecyclingRuleExecutor { + + boolean ifAccept(RecyclingRule recyclingRule); + + ServiceInstance[] executeRule(RecyclingRule recyclingRule); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java index 03a2b1465d4..70caae1a8d5 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java @@ 
-71,9 +71,10 @@ public class ECResourceInfoRestfulApi { public Message getECInfo( HttpServletRequest req, @RequestParam(value = "ticketid") String ticketid) throws AMErrorException { + logger.info("ticked: {} get ec info", ticketid); ECResourceInfoRecord ecResourceInfoRecord = ecResourceInfoService.getECResourceInfoRecord(ticketid); - String userName = ModuleUserUtils.getOperationUser(req, "getECInfo ticketid:") + ticketid; + String userName = ModuleUserUtils.getOperationUser(req, "getECInfo ticketid:" + ticketid); if (null != ecResourceInfoRecord && (userName.equalsIgnoreCase(ecResourceInfoRecord.getCreateUser()) || Configuration.isAdmin(userName))) { @@ -113,6 +114,7 @@ public Message deleteECInfo(HttpServletRequest req, @PathVariable("ticketid") St @ApiImplicitParam(name = "startDate", dataType = "String", value = "start date"), @ApiImplicitParam(name = "endDate", dataType = "String", value = "end date"), @ApiImplicitParam(name = "engineType", dataType = "String", value = "engine type"), + @ApiImplicitParam(name = "status", dataType = "String", value = "engine status"), @ApiImplicitParam(name = "pageNow", dataType = "String", value = "page now"), @ApiImplicitParam(name = "pageSize", dataType = "String", value = "page size") }) @@ -131,6 +133,7 @@ public Message queryEcrHistory( defaultValue = "#{new java.util.Date()}") Date endDate, @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "status", required = false) String status, @RequestParam(value = "pageNow", required = false, defaultValue = "1") Integer pageNow, @RequestParam(value = "pageSize", required = false, defaultValue = "20") Integer pageSize) { String username = SecurityFilter.getLoginUsername(req); @@ -138,6 +141,7 @@ public Message queryEcrHistory( instance = ECResourceInfoUtils.strCheckAndDef(instance, null); String creatorUser = ECResourceInfoUtils.strCheckAndDef(creator, null); engineType = ECResourceInfoUtils.strCheckAndDef(engineType, null); + status = 
ECResourceInfoUtils.strCheckAndDef(status, null); if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { return Message.error("Invalid creator : " + creatorUser); } @@ -148,7 +152,7 @@ public Message queryEcrHistory( calendar.set(Calendar.SECOND, 0); startDate = calendar.getTime(); } - if (Configuration.isAdmin(username)) { + if (Configuration.isJobHistoryAdmin(username)) { username = null; if (StringUtils.isNotBlank(creatorUser)) { username = creatorUser; @@ -161,12 +165,12 @@ public Message queryEcrHistory( try { queryTasks = ecResourceInfoService.getECResourceInfoRecordList( - instance, endDate, startDate, username, engineType); + instance, endDate, startDate, username, engineType, status); queryTasks.forEach( info -> { ECResourceInfoRecordVo ecrHistroryListVo = new ECResourceInfoRecordVo(); BeanUtils.copyProperties(info, ecrHistroryListVo); - ecrHistroryListVo.setEngineType(info.getLabelValue().split(",")[1].split("-")[0]); + ecrHistroryListVo.setEngineType(info.getEngineType()); ecrHistroryListVo.setUsedResource( ECResourceInfoUtils.getStringToMap(info.getUsedResource(), info)); ecrHistroryListVo.setReleasedResource( @@ -188,25 +192,51 @@ public Message queryEcrHistory( @ApiImplicitParam(name = "creators", dataType = "Array", required = true, value = "creators"), @ApiImplicitParam(name = "engineTypes", dataType = "Array", value = "engine type"), @ApiImplicitParam(name = "statuss", dataType = "Array", value = "statuss"), + @ApiImplicitParam(name = "queueName", dataType = "String", value = "queueName"), + @ApiImplicitParam(name = "ecInstances", dataType = "Array", value = "ecInstances"), + @ApiImplicitParam(name = "crossCluster", dataType = "String", value = "crossCluster"), }) @RequestMapping(path = "/ecList", method = RequestMethod.POST) public Message queryEcList(HttpServletRequest req, @RequestBody JsonNode jsonNode) { + String username = ModuleUserUtils.getOperationUser(req, "ecList"); + String token = ModuleUserUtils.getToken(req); 
+ // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to query ecList.", token); + return Message.error("Token:" + token + " has no permission to query ecList."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to query ecList.", username); + return Message.error("User:" + username + " has no permission to query ecList."); + } JsonNode creatorsParam = jsonNode.get("creators"); JsonNode engineTypesParam = jsonNode.get("engineTypes"); JsonNode statussParam = jsonNode.get("statuss"); + JsonNode queueNameParam = jsonNode.get("queueName"); + JsonNode ecInstancesParam = jsonNode.get("ecInstances"); + JsonNode crossClusterParam = jsonNode.get("crossCluster"); - if (creatorsParam == null || creatorsParam.isNull() || creatorsParam.size() == 0) { - return Message.error("creators is null in the parameters of the request(请求参数中【creators】为空)"); - } + // if (creatorsParam == null || creatorsParam.isNull() || creatorsParam.size() == 0) { + // return Message.error("creators is null in the parameters of the + // request(请求参数中【creators】为空)"); + // } List creatorUserList = new ArrayList<>(); - try { - creatorUserList = - JsonUtils.jackson() - .readValue(creatorsParam.toString(), new TypeReference>() {}); - } catch (JsonProcessingException e) { - return Message.error("parameters:creators parsing failed(请求参数【creators】解析失败)"); + if (creatorsParam != null && !creatorsParam.isNull()) { + try { + creatorUserList = + JsonUtils.jackson() + .readValue(creatorsParam.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:creators parsing failed(请求参数【creators】解析失败)"); + } + for (String creatorUser : creatorUserList) { + if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { + return Message.error("Invalid creator: " + creatorUser); + } + } } List engineTypeList = new 
ArrayList<>(); @@ -230,35 +260,52 @@ public Message queryEcList(HttpServletRequest req, @RequestBody JsonNode jsonNod return Message.error("parameters:statuss parsing failed(请求参数【statuss】解析失败)"); } } - - String username = ModuleUserUtils.getOperationUser(req, "ecList"); - - String token = ModuleUserUtils.getToken(req); - // check special admin token - if (StringUtils.isNotBlank(token)) { - if (!Configuration.isAdminToken(token)) { - logger.warn("Token:{} has no permission to query ecList.", token); - return Message.error("Token:" + token + " has no permission to query ecList."); + String queueName = ""; + if (queueNameParam != null && !queueNameParam.isNull()) { + try { + queueName = + JsonUtils.jackson() + .readValue(queueNameParam.toString(), new TypeReference() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:queueName parsing failed(请求参数【queueName】解析失败)"); } - } else if (!Configuration.isAdmin(username)) { - logger.warn("User:{} has no permission to query ecList.", username); - return Message.error("User:" + username + " has no permission to query ecList."); } - - for (String creatorUser : creatorUserList) { - if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { - return Message.error("Invalid creator: " + creatorUser); + List ecInstancesList = new ArrayList<>(); + if (ecInstancesParam != null && !ecInstancesParam.isNull()) { + try { + ecInstancesList = + JsonUtils.jackson() + .readValue(ecInstancesParam.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:instanceName parsing failed(请求参数【ecInstances】解析失败)"); + } + } + Boolean isCrossCluster = null; + if (crossClusterParam != null && !crossClusterParam.isNull()) { + try { + isCrossCluster = + JsonUtils.jackson() + .readValue(crossClusterParam.toString(), new TypeReference() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:crossCluster parsing 
failed(请求参数【crossCluster】解析失败)"); } } - logger.info( - "request parameters creatorUserList:[{}], engineTypeList:[{}], statusStrList:[{}]", + "request parameters creatorUserList:[{}], engineTypeList:[{}], statusStrList:[{}], queueName:{}, instanceNameList:{}", String.join(",", creatorUserList), String.join(",", engineTypeList), - String.join(",", statusStrList)); + String.join(",", statusStrList), + String.join(",", ecInstancesList), + queueNameParam); List> list = - ecResourceInfoService.getECResourceInfoList(creatorUserList, engineTypeList, statusStrList); + ecResourceInfoService.getECResourceInfoList( + creatorUserList, + engineTypeList, + statusStrList, + queueName, + ecInstancesList, + isCrossCluster); return Message.ok().data("ecList", list); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java index 4d8bbad27d3..dd27e52a34b 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java @@ -25,24 +25,38 @@ import org.apache.linkis.manager.am.exception.AMErrorCode; import org.apache.linkis.manager.am.exception.AMErrorException; import org.apache.linkis.manager.am.manager.EngineNodeManager; +import org.apache.linkis.manager.am.service.ECResourceInfoService; import org.apache.linkis.manager.am.service.em.ECMOperateService; import org.apache.linkis.manager.am.service.em.EMInfoService; +import org.apache.linkis.manager.am.service.engine.DefaultEngineCreateService; import org.apache.linkis.manager.am.utils.AMUtils; +import org.apache.linkis.manager.am.vo.CanCreateECRes; import 
org.apache.linkis.manager.am.vo.EMNodeVo; import org.apache.linkis.manager.common.entity.enumeration.NodeHealthy; import org.apache.linkis.manager.common.entity.metrics.NodeHealthyInfo; import org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.manager.common.entity.persistence.ECResourceInfoRecord; +import org.apache.linkis.manager.common.exception.RMErrorException; import org.apache.linkis.manager.common.protocol.OperateRequest$; import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest$; import org.apache.linkis.manager.common.protocol.em.ECMOperateResponse; +import org.apache.linkis.manager.common.protocol.engine.EngineCreateRequest; +import org.apache.linkis.manager.exception.PersistenceErrorException; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactory; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext; import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.manager.label.entity.UserModifiable; +import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; import org.apache.linkis.manager.label.exception.LabelErrorException; import org.apache.linkis.manager.label.service.NodeLabelService; +import org.apache.linkis.manager.label.utils.EngineTypeLabelCreator; +import org.apache.linkis.manager.persistence.LabelManagerPersistence; +import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; +import org.apache.linkis.manager.persistence.ResourceManagerPersistence; +import org.apache.linkis.manager.rm.external.service.ExternalResourceService; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; @@ -57,6 +71,7 @@ import javax.servlet.http.HttpServletRequest; +import 
java.text.MessageFormat; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -91,12 +106,24 @@ public class EMRestfulApi { @Autowired private ECMOperateService ecmOperateService; + @Autowired private ECResourceInfoService ecResourceInfoService; + + @Autowired private ResourceManagerPersistence resourceManagerPersistence; + + @Autowired private LabelManagerPersistence labelManagerPersistence; + + @Autowired private ExternalResourceService externalResourceService; + + @Autowired private DefaultEngineCreateService defaultEngineCreateService; + + @Autowired private NodeMetricManagerPersistence nodeMetricManagerPersistence; + private LabelBuilderFactory stdLabelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory(); private Logger logger = LoggerFactory.getLogger(EMRestfulApi.class); - private String[] adminOperations = AMConfiguration.ECM_ADMIN_OPERATIONS().getValue().split(","); + private String[] adminOperations = AMConfiguration.ECM_ADMIN_OPERATIONS.getValue().split(","); private void checkAdmin(String userName) throws AMErrorException { if (Configuration.isNotAdmin(userName)) { @@ -227,6 +254,7 @@ public Message listAllNodeHealthyStatus( @ApiImplicitParam(name = "instance", dataType = "String", example = "bdp110:9102"), @ApiImplicitParam(name = "labels", dataType = "List", value = "Labels"), @ApiImplicitParam(name = "labelKey", dataType = "String", example = "emInstance"), + @ApiImplicitParam(name = "description", dataType = "String", example = ""), @ApiImplicitParam( name = "stringValue", dataType = "String", @@ -285,6 +313,10 @@ public Message modifyEMInfo(HttpServletRequest req, @RequestBody JsonNode jsonNo nodeLabelService.updateLabelsToNode(serviceInstance, newLabelList); logger.info("success to update label of instance: " + serviceInstance.getInstance()); } + JsonNode description = jsonNode.get("description"); + if (null != description) { + 
nodeMetricManagerPersistence.updateNodeMetricDescription(description.asText(), instance); + } return Message.ok("success"); } @@ -323,7 +355,10 @@ public Message executeECMOperationByEC(HttpServletRequest req, @RequestBody Json return Message.error( "You have no permission to execute ECM Operation by this EngineConn " + serviceInstance); } - return executeECMOperation(engineNode.getEMNode(), new ECMOperateRequest(userName, parameters)); + return executeECMOperation( + engineNode.getEMNode(), + engineNode.getServiceInstance().getInstance(), + new ECMOperateRequest(userName, parameters)); } @ApiOperation( @@ -354,7 +389,14 @@ public Message executeECMOperation(HttpServletRequest req, @RequestBody JsonNode "Fail to process the operation parameters, cased by " + ExceptionUtils.getRootCauseMessage(e)); } - return executeECMOperation(ecmNode, new ECMOperateRequest(userName, parameters)); + if (parameters.containsKey("engineConnInstance")) { + return executeECMOperation( + ecmNode, + parameters.get("engineConnInstance").toString(), + new ECMOperateRequest(userName, parameters)); + } else { + return executeECMOperation(ecmNode, "", new ECMOperateRequest(userName, parameters)); + } } @ApiOperation(value = "openEngineLog", notes = "open Engine log", response = Message.class) @@ -377,9 +419,10 @@ public Message openEngineLog(HttpServletRequest req, @RequestBody JsonNode jsonN String userName = ModuleUserUtils.getOperationUser(req, "openEngineLog"); EMNode ecmNode; Map parameters; + String engineInstance; try { String emInstance = jsonNode.get("emInstance").asText(); - String engineInstance = jsonNode.get("instance").asText(); + engineInstance = jsonNode.get("instance").asText(); ServiceInstance serviceInstance = EngineRestfulApi.getServiceInstance(jsonNode); logger.info("User {} try to open engine: {} log.", userName, serviceInstance); ecmNode = @@ -392,7 +435,11 @@ public Message openEngineLog(HttpServletRequest req, @RequestBody JsonNode jsonN 
jsonNode.get("parameters").toString(), new TypeReference>() {}); String logType = (String) parameters.get("logType"); - if (!logType.equals("stdout") && !logType.equals("stderr")) { + if (!logType.equals("stdout") + && !logType.equals("stderr") + && !logType.equals("gc") + && !logType.equals("udfLog") + && !logType.equals("yarnApp")) { throw new AMErrorException( AMErrorCode.PARAM_ERROR.getErrorCode(), AMErrorCode.PARAM_ERROR.getErrorDesc()); } @@ -413,16 +460,24 @@ public Message openEngineLog(HttpServletRequest req, @RequestBody JsonNode jsonN logger.error("Failed to open engine log, error:", e); return Message.error(e.getMessage()); } - return executeECMOperation(ecmNode, new ECMOperateRequest(userName, parameters)); + return executeECMOperation( + ecmNode, engineInstance, new ECMOperateRequest(userName, parameters)); } - private Message executeECMOperation(EMNode ecmNode, ECMOperateRequest ecmOperateRequest) { + private Message executeECMOperation( + EMNode ecmNode, String engineInstance, ECMOperateRequest ecmOperateRequest) { + if (Objects.isNull(ecmNode)) { + return Message.error( + MessageFormat.format( + "ECM node :[{0}] does not exist, Unable to open engine log(ECM节点:[{1}] 异常,无法打开日志,可能是该节点服务重启或者服务异常导致)", + engineInstance, engineInstance)); + } String operationName = OperateRequest$.MODULE$.getOperationName(ecmOperateRequest.parameters()); - if (ArrayUtils.contains(adminOperations, operationName) - && Configuration.isNotAdmin(ecmOperateRequest.user())) { + String userName = ecmOperateRequest.user(); + if (ArrayUtils.contains(adminOperations, operationName) && Configuration.isNotAdmin(userName)) { logger.warn( "User {} has no permission to execute {} admin Operation in ECM {}.", - ecmOperateRequest.user(), + userName, operationName, ecmNode.getServiceInstance()); return Message.error( @@ -431,6 +486,33 @@ private Message executeECMOperation(EMNode ecmNode, ECMOperateRequest ecmOperate + " admin Operation in ECM " + ecmNode.getServiceInstance()); } + + // 
fill in logDirSuffix + if (StringUtils.isNotBlank(engineInstance) + && Objects.isNull(ecmOperateRequest.parameters().get("logDirSuffix"))) { + ECResourceInfoRecord ecResourceInfoRecord = + ecResourceInfoService.getECResourceInfoRecordByInstance(engineInstance); + if (Objects.isNull(ecResourceInfoRecord)) { + return Message.error("EC instance: " + engineInstance + " not exist "); + } + // eg logDirSuffix -> root/20230705/io_file/6d48068a-0e1e-44b5-8eb2-835034db5b30/logs + String logDirSuffix = ecResourceInfoRecord.getLogDirSuffix(); + if (!userName.equals(ecResourceInfoRecord.getCreateUser()) + && Configuration.isNotJobHistoryAdmin(userName)) { + logger.warn( + "User {} has no permission to get log with path: {} in ECM:{}.", + userName, + logDirSuffix, + ecmNode.getServiceInstance()); + return Message.error( + "You have no permission to get log with path:" + + logDirSuffix + + " in ECM:" + + ecmNode.getServiceInstance()); + } + ecmOperateRequest.parameters().put("logDirSuffix", logDirSuffix); + } + ECMOperateResponse engineOperateResponse = ecmOperateService.executeOperation(ecmNode, ecmOperateRequest); @@ -439,4 +521,86 @@ private Message executeECMOperation(EMNode ecmNode, ECMOperateRequest ecmOperate .data("errorMsg", engineOperateResponse.errorMsg()) .data("isError", engineOperateResponse.isError()); } + + @ApiOperation( + value = "taskprediction", + notes = "linkis task taskprediction", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "username", dataType = "String", example = "hadoop"), + @ApiImplicitParam(name = "engineType", dataType = "String", example = "spark/hive"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "ide"), + @ApiImplicitParam(name = "clustername", dataType = "String", example = "clustername"), + @ApiImplicitParam(name = "queueName", dataType = "String", example = "queueName"), + @ApiImplicitParam(name = "tenant", dataType = "String", defaultValue = "tenant"), + }) + 
@ApiOperationSupport(ignoreParameters = {"jsonNode"}) + @RequestMapping(path = "/task-prediction", method = RequestMethod.GET) + public Message taskprediction( + HttpServletRequest req, + @RequestParam(value = "username", required = false) String username, + @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "creator", required = false) String creator, + @RequestParam(value = "clustername", required = false) String clusterName, + @RequestParam(value = "queueName", required = false) String queueName, + @RequestParam(value = "tenant", required = false) String tenant) + throws PersistenceErrorException, RMErrorException { + String loginUser = ModuleUserUtils.getOperationUser(req, "taskprediction"); + if (StringUtils.isBlank(username)) { + username = loginUser; + } + if (StringUtils.isBlank(engineType)) { + return Message.error("parameters:engineType can't be null (请求参数【engineType】不能为空)"); + } + if (StringUtils.isBlank(creator)) { + return Message.error("parameters:creator can't be null (请求参数【creator】不能为空)"); + } + UserCreatorLabel userCreatorLabel = + LabelBuilderFactoryContext.getLabelBuilderFactory().createLabel(UserCreatorLabel.class); + userCreatorLabel.setCreator(creator); + userCreatorLabel.setUser(username); + EngineTypeLabel engineTypeLabel = EngineTypeLabelCreator.createEngineTypeLabel(engineType); + + Map parms = new HashMap<>(); + parms.put(userCreatorLabel.getLabelKey(), userCreatorLabel.getStringValue()); + parms.put(engineTypeLabel.getLabelKey(), engineTypeLabel.getStringValue()); + if (StringUtils.isNotBlank(tenant)) { + parms.put("tenant", tenant); + } + EngineCreateRequest engineCreateRequest = new EngineCreateRequest(); + engineCreateRequest.setUser(username); + engineCreateRequest.setLabels(parms); + CanCreateECRes canCreateECRes = defaultEngineCreateService.canCreateEC(engineCreateRequest); + return Message.ok() + .data("tenant", tenant) + .data("userResource", canCreateECRes.getLabelResource()) + 
.data("ecmResource", canCreateECRes.getEcmResource()) + .data("yarnResource", canCreateECRes.getYarnResource()) + .data("checkResult", canCreateECRes.isCanCreateEC()); + } + + @ApiOperation( + value = "reset resource", + notes = "ecm & user resource reset", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "serviceInstance", + dataType = "String", + example = "gz.bdz.bdplxxxxx.webank:9102"), + @ApiImplicitParam(name = "username", dataType = "String", example = "hadoop") + }) + @RequestMapping(path = "/reset-resource", method = RequestMethod.GET) + public Message resetResource( + HttpServletRequest req, + @RequestParam(value = "serviceInstance", required = false) String serviceInstance, + @RequestParam(value = "username", required = false) String username) { + + String loginUser = ModuleUserUtils.getOperationUser(req, "reset resource"); + if (Configuration.isNotAdmin(loginUser)) { + return Message.error("Only Admin Can Use Reset Resource (重置资源仅管理员使用)"); + } + emInfoService.resetResource(serviceInstance, username); + return Message.ok(); + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EngineRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EngineRestfulApi.java index 475c58c9e8a..1e118e0ee7c 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EngineRestfulApi.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EngineRestfulApi.java @@ -23,28 +23,29 @@ import org.apache.linkis.common.utils.ByteTimeUtils; import org.apache.linkis.common.utils.JsonUtils; import org.apache.linkis.governance.common.conf.GovernanceCommonConf; +import org.apache.linkis.governance.common.constant.ec.ECConstants; 
+import org.apache.linkis.governance.common.utils.JobUtils; +import org.apache.linkis.governance.common.utils.LoggerUtils; import org.apache.linkis.manager.am.conf.AMConfiguration; import org.apache.linkis.manager.am.exception.AMErrorCode; import org.apache.linkis.manager.am.exception.AMErrorException; import org.apache.linkis.manager.am.manager.EngineNodeManager; import org.apache.linkis.manager.am.service.ECResourceInfoService; -import org.apache.linkis.manager.am.service.engine.EngineCreateService; -import org.apache.linkis.manager.am.service.engine.EngineInfoService; -import org.apache.linkis.manager.am.service.engine.EngineOperateService; -import org.apache.linkis.manager.am.service.engine.EngineStopService; +import org.apache.linkis.manager.am.service.engine.*; import org.apache.linkis.manager.am.util.ECResourceInfoUtils; import org.apache.linkis.manager.am.utils.AMUtils; import org.apache.linkis.manager.am.vo.AMEngineNodeVo; +import org.apache.linkis.manager.common.constant.AMConstant; +import org.apache.linkis.manager.common.entity.enumeration.NodeHealthy; import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; import org.apache.linkis.manager.common.entity.node.AMEMNode; +import org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; import org.apache.linkis.manager.common.entity.persistence.ECResourceInfoRecord; -import org.apache.linkis.manager.common.protocol.engine.EngineCreateRequest; -import org.apache.linkis.manager.common.protocol.engine.EngineOperateRequest; -import org.apache.linkis.manager.common.protocol.engine.EngineOperateResponse; -import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; +import org.apache.linkis.manager.common.protocol.engine.*; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactory; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext; +import 
org.apache.linkis.manager.label.constant.LabelKeyConstant; import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.manager.label.entity.UserModifiable; import org.apache.linkis.manager.label.exception.LabelErrorException; @@ -52,6 +53,7 @@ import org.apache.linkis.rpc.Sender; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; +import org.apache.linkis.storage.utils.StorageUtils; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -65,11 +67,10 @@ import java.io.IOException; import java.text.MessageFormat; import java.util.*; +import java.util.concurrent.Callable; import java.util.stream.Collectors; import java.util.stream.Stream; -import scala.annotation.meta.param; - import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; @@ -91,6 +92,7 @@ public class EngineRestfulApi { @Autowired private EngineInfoService engineInfoService; + @Autowired private EngineAskEngineService engineAskService; @Autowired private EngineCreateService engineCreateService; @Autowired private EngineNodeManager engineNodeManager; @@ -103,6 +105,8 @@ public class EngineRestfulApi { @Autowired private ECResourceInfoService ecResourceInfoService; + @Autowired private EngineReuseService engineReuseService; + private final ObjectMapper objectMapper = new ObjectMapper(); private LabelBuilderFactory stdLabelBuilderFactory = @@ -110,6 +114,183 @@ public class EngineRestfulApi { private static final Logger logger = LoggerFactory.getLogger(EngineRestfulApi.class); + @ApiOperation(value = "askEngineConn", response = Message.class) + @ApiOperationSupport(ignoreParameters = {"jsonNode"}) + @RequestMapping(path = "/askEngineConn", method = RequestMethod.POST) + public Message askEngineConn( + HttpServletRequest req, @RequestBody EngineAskRequest engineAskRequest) + throws IOException, 
InterruptedException { + String userName = ModuleUserUtils.getOperationUser(req, "askEngineConn"); + engineAskRequest.setUser(userName); + long timeout = engineAskRequest.getTimeOut(); + if (timeout <= 0) { + timeout = AMConfiguration.ENGINE_CONN_START_REST_MAX_WAIT_TIME.getValue().toLong(); + engineAskRequest.setTimeOut(timeout); + } + Map retEngineNode = new HashMap<>(); + logger.info( + "User {} try to ask an engineConn with maxStartTime {}. EngineAskRequest is {}.", + userName, + ByteTimeUtils.msDurationToString(timeout), + engineAskRequest); + Sender sender = Sender.getSender(Sender.getThisServiceInstance()); + EngineNode engineNode = null; + + // try to reuse ec first + String taskId = JobUtils.getJobIdFromStringMap(engineAskRequest.getProperties()); + LoggerUtils.setJobIdMDC(taskId); + logger.info("received task : {}, engineAskRequest : {}", taskId, engineAskRequest); + if (!engineAskRequest.getLabels().containsKey(LabelKeyConstant.EXECUTE_ONCE_KEY)) { + EngineReuseRequest engineReuseRequest = new EngineReuseRequest(); + engineReuseRequest.setLabels(engineAskRequest.getLabels()); + engineReuseRequest.setTimeOut(engineAskRequest.getTimeOut()); + engineReuseRequest.setUser(engineAskRequest.getUser()); + engineReuseRequest.setProperties(engineAskRequest.getProperties()); + boolean end = false; + EngineNode reuseNode = null; + int count = 0; + int MAX_RETRY = 2; + while (!end) { + try { + reuseNode = engineReuseService.reuseEngine(engineReuseRequest, sender); + end = true; + } catch (LinkisRetryException e) { + logger.error( + "task: {}, user: {} reuse engine failed", taskId, engineReuseRequest.getUser(), e); + Thread.sleep(1000); + end = false; + count += 1; + if (count > MAX_RETRY) { + end = true; + } + } catch (Exception e1) { + logger.info( + "task: {} user: {} reuse engine failed", taskId, engineReuseRequest.getUser(), e1); + end = true; + } + } + if (null != reuseNode) { + logger.info( + "Finished to ask engine for task: {}, user: {} by reuse node {}", + 
taskId, + engineReuseRequest.getUser(), + reuseNode); + LoggerUtils.removeJobIdMDC(); + engineNode = reuseNode; + } + } + + if (null != engineNode) { + fillResultEngineNode(retEngineNode, engineNode); + return Message.ok("reuse engineConn ended.").data("engine", retEngineNode); + } + + String engineAskAsyncId = EngineAskEngineService$.MODULE$.getAsyncId(); + Callable createECTask = + new Callable() { + @Override + public Object call() { + LoggerUtils.setJobIdMDC(taskId); + logger.info( + "Task: {}, start to async({}) createEngine: {}", + taskId, + engineAskAsyncId, + engineAskRequest.getCreateService()); + // remove engineInstance label if exists + engineAskRequest.getLabels().remove("engineInstance"); + EngineCreateRequest engineCreateRequest = new EngineCreateRequest(); + engineCreateRequest.setLabels(engineAskRequest.getLabels()); + engineCreateRequest.setTimeout(engineAskRequest.getTimeOut()); + engineCreateRequest.setUser(engineAskRequest.getUser()); + engineCreateRequest.setProperties(engineAskRequest.getProperties()); + engineCreateRequest.setCreateService(engineAskRequest.getCreateService()); + try { + EngineNode createNode = engineCreateService.createEngine(engineCreateRequest, sender); + long timeout = 0L; + if (engineCreateRequest.getTimeout() <= 0) { + timeout = AMConfiguration.ENGINE_START_MAX_TIME.getValue().toLong(); + } else { + timeout = engineCreateRequest.getTimeout(); + } + // useEngine need to add timeout + EngineNode createEngineNode = engineNodeManager.useEngine(createNode, timeout); + if (null == createEngineNode) { + throw new LinkisRetryException( + AMConstant.EM_ERROR_CODE, + "create engine${createNode.getServiceInstance} success, but to use engine failed"); + } + logger.info( + "Task: $taskId finished to ask engine for user ${engineAskRequest.getUser} by create node $createEngineNode"); + return createEngineNode; + } catch (Exception e) { + logger.error( + "Task: {} failed to ask engine for user {} by create node", taskId, userName, e); 
+ return new LinkisRetryException(AMConstant.EM_ERROR_CODE, e.getMessage()); + } finally { + LoggerUtils.removeJobIdMDC(); + } + } + }; + + try { + Object rs = createECTask.call(); + if (rs instanceof LinkisRetryException) { + throw (LinkisRetryException) rs; + } else { + engineNode = (EngineNode) rs; + } + } catch (LinkisRetryException retryException) { + logger.error( + "User {} create engineConn failed get retry exception. can be Retry", + userName, + retryException); + return Message.error( + String.format( + "Create engineConn failed, caused by %s.", + ExceptionUtils.getRootCauseMessage(retryException))) + .data("canRetry", true); + } catch (Exception e) { + LoggerUtils.removeJobIdMDC(); + logger.error("User {} create engineConn failed get retry exception", userName, e); + return Message.error( + String.format( + "Create engineConn failed, caused by %s.", ExceptionUtils.getRootCauseMessage(e))); + } + + LoggerUtils.removeJobIdMDC(); + fillResultEngineNode(retEngineNode, engineNode); + logger.info( + "Finished to create a engineConn for user {}. 
NodeInfo is {}.", userName, engineNode); + // to transform to a map + return Message.ok("create engineConn ended.").data("engine", retEngineNode); + } + + private void fillNullNode( + Map retEngineNode, EngineAskAsyncResponse askAsyncResponse) { + retEngineNode.put(AMConstant.EC_ASYNC_START_RESULT_KEY, AMConstant.EC_ASYNC_START_RESULT_FAIL); + retEngineNode.put( + AMConstant.EC_ASYNC_START_FAIL_MSG_KEY, + "Got null response for asyId : " + askAsyncResponse.id()); + retEngineNode.put(ECConstants.MANAGER_SERVICE_INSTANCE_KEY(), Sender.getThisServiceInstance()); + } + + private void fillResultEngineNode(Map retEngineNode, EngineNode engineNode) { + retEngineNode.put( + AMConstant.EC_ASYNC_START_RESULT_KEY, AMConstant.EC_ASYNC_START_RESULT_SUCCESS); + retEngineNode.put("serviceInstance", engineNode.getServiceInstance()); + if (null == engineNode.getNodeStatus()) { + engineNode.setNodeStatus(NodeStatus.Starting); + } + retEngineNode.put(ECConstants.NODE_STATUS_KEY(), engineNode.getNodeStatus().toString()); + retEngineNode.put(ECConstants.EC_TICKET_ID_KEY(), engineNode.getTicketId()); + EMNode emNode = engineNode.getEMNode(); + if (null != emNode) { + retEngineNode.put( + ECConstants.ECM_SERVICE_INSTANCE_KEY(), engineNode.getEMNode().getServiceInstance()); + } + retEngineNode.put(ECConstants.MANAGER_SERVICE_INSTANCE_KEY(), Sender.getThisServiceInstance()); + } + @ApiOperation(value = "createEngineConn", response = Message.class) @ApiOperationSupport(ignoreParameters = {"jsonNode"}) @RequestMapping(path = "/createEngineConn", method = RequestMethod.POST) @@ -120,7 +301,7 @@ public Message createEngineConn( engineCreateRequest.setUser(userName); long timeout = engineCreateRequest.getTimeout(); if (timeout <= 0) { - timeout = AMConfiguration.ENGINE_CONN_START_REST_MAX_WAIT_TIME().getValue().toLong(); + timeout = AMConfiguration.ENGINE_CONN_START_REST_MAX_WAIT_TIME.getValue().toLong(); engineCreateRequest.setTimeout(timeout); } logger.info( @@ -149,13 +330,7 @@ public 
Message createEngineConn( "Finished to create a engineConn for user {}. NodeInfo is {}.", userName, engineNode); // to transform to a map Map retEngineNode = new HashMap<>(); - retEngineNode.put("serviceInstance", engineNode.getServiceInstance()); - if (null == engineNode.getNodeStatus()) { - engineNode.setNodeStatus(NodeStatus.Starting); - } - retEngineNode.put("nodeStatus", engineNode.getNodeStatus().toString()); - retEngineNode.put("ticketId", engineNode.getTicketId()); - retEngineNode.put("ecmServiceInstance", engineNode.getEMNode().getServiceInstance()); + fillResultEngineNode(retEngineNode, engineNode); return Message.ok("create engineConn succeed.").data("engine", retEngineNode); } @@ -173,6 +348,7 @@ public Message getEngineConn(HttpServletRequest req, @RequestBody JsonNode jsonN } catch (Exception e) { logger.info("Instances {} does not exist", serviceInstance.getInstance()); } + String ecMetrics = null; if (null == engineNode) { ECResourceInfoRecord ecInfo = null; if (null != ticketIdNode) { @@ -189,12 +365,19 @@ public Message getEngineConn(HttpServletRequest req, @RequestBody JsonNode jsonN if (null == ecInfo) { return Message.error("Instance does not exist " + serviceInstance); } + if (null == ecMetrics) { + ecMetrics = ecInfo.getMetrics(); + } engineNode = ECResourceInfoUtils.convertECInfoTOECNode(ecInfo); + } else { + ecMetrics = engineNode.getEcMetrics(); } if (!userName.equals(engineNode.getOwner()) && Configuration.isNotAdmin(userName)) { return Message.error("You have no permission to access EngineConn " + serviceInstance); } - return Message.ok().data("engine", engineNode); + Message result = Message.ok().data("engine", engineNode); + result.data(AMConstant.EC_METRICS_KEY, ecMetrics); + return result; } @ApiOperation(value = "kill egineconn", notes = "kill engineconn", response = Message.class) @@ -271,6 +454,7 @@ public Message killEngine(HttpServletRequest req, @RequestBody Map engineParam : param) { String moduleName = 
engineParam.get("applicationName"); String engineInstance = engineParam.get("engineInstance"); + logger.info("try to kill engine with engineInstance:{}", engineInstance); EngineStopRequest stopEngineRequest = new EngineStopRequest(ServiceInstance.apply(moduleName, engineInstance), userName); engineStopService.stopEngine(stopEngineRequest, sender); @@ -423,7 +607,8 @@ public Message listEMEngines(HttpServletRequest req, @RequestBody JsonNode jsonN @ApiImplicitParam(name = "instance", dataType = "String", example = "bdp110:12295"), @ApiImplicitParam(name = "labels", dataType = "List", required = false, value = "labels"), @ApiImplicitParam(name = "labelKey", dataType = "String", example = "engineInstance"), - @ApiImplicitParam(name = "stringValue", dataType = "String", example = "linkis-cg:12295") + @ApiImplicitParam(name = "stringValue", dataType = "String", example = "linkis-cg:12295"), + @ApiImplicitParam(name = "nodeHealthy", dataType = "String", example = "UnHealthy") }) @ApiOperationSupport(ignoreParameters = {"jsonNode"}) @RequestMapping(path = "/modifyEngineInfo", method = RequestMethod.PUT) @@ -435,6 +620,7 @@ public Message modifyEngineInfo(HttpServletRequest req, @RequestBody JsonNode js 210003, "Only admin can modify engineConn information(只有管理员才能修改引擎信息)."); } ServiceInstance serviceInstance = getServiceInstance(jsonNode); + JsonNode labels = jsonNode.get("labels"); Set labelKeySet = new HashSet<>(); if (labels != null) { @@ -458,9 +644,53 @@ public Message modifyEngineInfo(HttpServletRequest req, @RequestBody JsonNode js nodeLabelService.updateLabelsToNode(serviceInstance, newLabelList); logger.info("success to update label of instance: " + serviceInstance.getInstance()); } + + // 修改引擎健康状态,只支持 Healthy和 UnHealthy + String healthyKey = "Healthy"; + String unHealthyKey = "UnHealthy"; + JsonNode nodeHealthy = jsonNode.get("nodeHealthy"); + if (nodeHealthy != null && healthyKey.equals(nodeHealthy.asText())) { + 
engineInfoService.updateEngineHealthyStatus(serviceInstance, NodeHealthy.Healthy); + } else if (nodeHealthy != null && unHealthyKey.equals(nodeHealthy.asText())) { + engineInfoService.updateEngineHealthyStatus(serviceInstance, NodeHealthy.UnHealthy); + } return Message.ok("success to update engine information(更新引擎信息成功)"); } + @ApiOperation( + value = "batchSetEngineToUnHealthy", + notes = "batch set engine to unHealthy", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "instances", + dataType = "String", + example = + "[{\"instance\":\"bdplinkis1001:38701\",\"engineType\":\"spark\",\"applicationName\":\"linkis-cg-engineconn\"}]") + }) + @ApiOperationSupport(ignoreParameters = {"jsonNode"}) + @RequestMapping(path = "/batchSetEngineToUnHealthy", method = RequestMethod.POST) + public Message batchSetEngineToUnHealthy(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws AMErrorException { + String username = ModuleUserUtils.getOperationUser(req, "batchSetEngineToUnHealthy"); + if (Configuration.isNotAdmin(username)) { + throw new AMErrorException( + 210003, "Only admin can modify engineConn healthy info(只有管理员才能修改引擎健康信息)."); + } + + JsonNode instances = jsonNode.get("instances"); + if (instances != null) { + Iterator iterator = instances.iterator(); + while (iterator.hasNext()) { + JsonNode instanceNode = iterator.next(); + ServiceInstance serviceInstance = getServiceInstance(instanceNode); + engineInfoService.updateEngineHealthyStatus(serviceInstance, NodeHealthy.UnHealthy); + } + } + logger.info("success to batch update engine status to UnHealthy."); + return Message.ok("success to update engine information(批量更新引擎健康信息成功)"); + } + @ApiOperation( value = "listAllNodeHealthyStatus", notes = "get all node healthy staus list", @@ -484,6 +714,11 @@ public Message executeEngineConnOperation(HttpServletRequest req, @RequestBody J ServiceInstance serviceInstance = getServiceInstance(jsonNode); logger.info("User {} try to execute 
Engine Operation {}.", userName, serviceInstance); EngineNode engineNode = engineNodeManager.getEngineNode(serviceInstance); + if (null == engineNode) { + return Message.ok() + .data("isError", true) + .data("errorMsg", "Ec : " + serviceInstance.toString() + " not found."); + } if (!userName.equals(engineNode.getOwner()) && Configuration.isNotAdmin(userName)) { return Message.error("You have no permission to execute Engine Operation " + serviceInstance); } @@ -500,6 +735,53 @@ public Message executeEngineConnOperation(HttpServletRequest req, @RequestBody J .data("isError", engineOperateResponse.isError()); } + @ApiOperation( + value = "kill engineconns of an ecm", + notes = "Kill engine by creator or engineType", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "creator", dataType = "String", required = true, example = "IDE"), + @ApiImplicitParam( + name = "engineType", + dataType = "String", + required = true, + example = "hive-2.3.3"), + }) + @ApiOperationSupport(ignoreParameters = {"param"}) + @RequestMapping(path = "/rm/killEngineByCreatorEngineType", method = RequestMethod.POST) + public Message killEngineByUpdateConfig(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws AMErrorException { + String userName = ModuleUserUtils.getOperationUser(req); + String jvmUser = StorageUtils.getJvmUser(); + if (jvmUser.equals(userName)) { + return Message.error( + jvmUser + " users do not support this feature (" + jvmUser + " 用户不支持此功能)"); + } + JsonNode creator = jsonNode.get("creator"); + if (null == creator || StringUtils.isBlank(creator.textValue())) { + return Message.error("creator is null in the parameters of the request(请求参数中【creator】为空)"); + } + String creatorStr = Configuration.getGlobalCreator(creator.textValue()); + String engineType = ""; + if (null != jsonNode.get("engineType")) { + engineType = jsonNode.get("engineType").textValue(); + } + if (StringUtils.isNotBlank(engineType) + && 
AMConfiguration.isUnAllowKilledEngineType(engineType)) { + return Message.error("multi user engine does not support this feature(多用户引擎不支持此功能)"); + } + if (Configuration.GLOBAL_CONF_SYMBOL().equals(engineType)) { + Arrays.stream(AMConfiguration.UDF_KILL_ENGINE_TYPE.split(",")) + .forEach( + engine -> + engineStopService.stopUnlockECByUserCreatorAndECType( + userName, creatorStr, engine)); + } else { + engineStopService.stopUnlockECByUserCreatorAndECType(userName, creatorStr, engineType); + } + return Message.ok("Kill engineConn succeed"); + } + static ServiceInstance getServiceInstance(JsonNode jsonNode) throws AMErrorException { String applicationName = jsonNode.get("applicationName").asText(); String instance = jsonNode.get("instance").asText(); diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/ECResourceInfoService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/ECResourceInfoService.java index 6aabe19198c..433862e9645 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/ECResourceInfoService.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/ECResourceInfoService.java @@ -34,14 +34,27 @@ public interface ECResourceInfoService { void deleteECResourceInfoRecord(Integer id); List getECResourceInfoRecordList( - String instance, Date endDate, Date startDate, String username, String engineType); + String instance, + Date endDate, + Date startDate, + String username, + String engineType, + String status); /** * @param creatorUserList engineconn creator list * @param engineTypeList engineconn type list * @param statusStrList engineconn status string list + * @param queueName + * @param ecInstancesList + * @param isCrossCluster * @return */ 
List> getECResourceInfoList( - List creatorUserList, List engineTypeList, List statusStrList); + List creatorUserList, + List engineTypeList, + List statusStrList, + String queueName, + List ecInstancesList, + Boolean isCrossCluster); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnPidCallbackService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnPidCallbackService.java new file mode 100644 index 00000000000..194eea590d8 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnPidCallbackService.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.service; + +import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid; + +@FunctionalInterface +public interface EngineConnPidCallbackService { + void dealPid(ResponseEngineConnPid protocol); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnStatusCallbackService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnStatusCallbackService.java new file mode 100644 index 00000000000..477d49aa10e --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnStatusCallbackService.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.service; + +import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM; + +public interface EngineConnStatusCallbackService { + + void dealEngineConnStatusCallbackToAM(EngineConnStatusCallbackToAM engineConnStatusCallbackToAM); +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnPidCallbackService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnPidCallbackService.java new file mode 100644 index 00000000000..5fbbb7c32a3 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnPidCallbackService.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.service.impl; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid; +import org.apache.linkis.manager.am.manager.DefaultEngineNodeManager; +import org.apache.linkis.manager.am.service.EngineConnPidCallbackService; +import org.apache.linkis.manager.am.service.engine.AbstractEngineService; +import org.apache.linkis.manager.common.constant.AMConstant; +import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.manager.label.service.NodeLabelService; +import org.apache.linkis.rpc.message.annotation.Receiver; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class DefaultEngineConnPidCallbackService extends AbstractEngineService + implements EngineConnPidCallbackService { + private static final Logger logger = + LoggerFactory.getLogger(DefaultEngineConnPidCallbackService.class); + + @Autowired private DefaultEngineNodeManager defaultEngineNodeManager; + + @Autowired private NodeLabelService nodeLabelService; + + @Receiver + @Override + public void dealPid(ResponseEngineConnPid protocol) { + // set pid + logger.info( + "DefaultEngineConnPidCallbackService dealPid serviceInstance: [{}] pid: [{}]" + + " ticketId: [{}]", + protocol.serviceInstance(), + protocol.pid(), + protocol.ticketId()); + + EngineNode engineNode = + defaultEngineNodeManager.getEngineNodeInfoByTicketId(protocol.ticketId()); + if (engineNode == null) { + logger.error( + "DefaultEngineConnPidCallbackService dealPid failed, engineNode is null, serviceInstance:{}", + protocol.serviceInstance()); + return; + } + + engineNode.setIdentifier(protocol.pid()); + ServiceInstance oldServiceInstance = engineNode.getServiceInstance(); + if (engineNode.getMark().equals(AMConstant.CLUSTER_PROCESS_MARK)) { + ServiceInstance 
serviceInstance = protocol.serviceInstance(); + engineNode.setServiceInstance(serviceInstance); + getEngineNodeManager().updateEngineNode(oldServiceInstance, engineNode); + nodeLabelService.labelsFromInstanceToNewInstance(oldServiceInstance, serviceInstance); + } + defaultEngineNodeManager.updateEngine(engineNode); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnStatusCallbackService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnStatusCallbackService.java new file mode 100644 index 00000000000..7b3d53f4ca6 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnStatusCallbackService.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.service.impl; + +import org.apache.linkis.manager.am.conf.AMConfiguration; +import org.apache.linkis.manager.am.converter.MetricsConverter; +import org.apache.linkis.manager.am.service.EngineConnStatusCallbackService; +import org.apache.linkis.manager.am.service.engine.EngineStopService; +import org.apache.linkis.manager.common.constant.AMConstant; +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; +import org.apache.linkis.manager.common.entity.metrics.AMNodeMetrics; +import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback; +import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM; +import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; +import org.apache.linkis.rpc.message.annotation.Receiver; +import org.apache.linkis.server.BDPJettyServerHelper; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.io.UnsupportedEncodingException; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +import com.fasterxml.jackson.core.JsonProcessingException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class DefaultEngineConnStatusCallbackService implements EngineConnStatusCallbackService { + + private static final Logger logger = + LoggerFactory.getLogger(DefaultEngineConnStatusCallbackService.class); + + @Autowired private NodeMetricManagerPersistence nodeMetricManagerPersistence; + + @Autowired private MetricsConverter metricsConverter; + + @Autowired private EngineStopService engineStopService; + + private static final String[] canRetryLogs = AMConfiguration.AM_CAN_RETRY_LOGS.split(";"); + + @Receiver + public void dealEngineConnStatusCallback(EngineConnStatusCallback protocol) { + logger.info( + "EngineConnStatusCallbackServiceImpl handle engineConnStatus 
callback serviceInstance: [{}] status: [{}]", + protocol.serviceInstance(), + protocol.status()); + if (!NodeStatus.isAvailable(protocol.status())) { + dealEngineConnStatusCallbackToAM( + new EngineConnStatusCallbackToAM( + protocol.serviceInstance(), protocol.status(), protocol.initErrorMsg(), false)); + } + } + + @Receiver + public void dealEngineConnStatusCallbackToAM( + EngineConnStatusCallbackToAM engineConnStatusCallbackToAM) { + if (engineConnStatusCallbackToAM.serviceInstance() == null) { + logger.warn("call back service instance is null"); + } + logger.info( + "EngineConnStatusCallbackServiceImpl start to deal engineConnStatusCallbackToAM {}", + engineConnStatusCallbackToAM); + + AMNodeMetrics nodeMetrics = new AMNodeMetrics(); + Map heartBeatMsg = new HashMap<>(); + int initErrorMsgMaxByteNum = 60000; + + String initErrorMsg = engineConnStatusCallbackToAM.initErrorMsg(); + try { + if (StringUtils.isNotBlank(initErrorMsg) + && initErrorMsg.getBytes("utf-8").length >= initErrorMsgMaxByteNum) { + initErrorMsg = initErrorMsg.substring(0, initErrorMsgMaxByteNum); + } + } catch (UnsupportedEncodingException e) { + logger.warn("dealEngineConnStatusCallbackToAM getBytes failed", e); + } + heartBeatMsg.put(AMConstant.START_REASON, initErrorMsg); + + if (engineConnStatusCallbackToAM.canRetry()) { + heartBeatMsg.put(AMConstant.EC_CAN_RETRY, engineConnStatusCallbackToAM.canRetry()); + } else if (matchRetryLog(engineConnStatusCallbackToAM.initErrorMsg())) { + logger.info("match canRetry log {}", engineConnStatusCallbackToAM.serviceInstance()); + heartBeatMsg.put(AMConstant.EC_CAN_RETRY, true); + } + + try { + nodeMetrics.setHeartBeatMsg( + BDPJettyServerHelper.jacksonJson().writeValueAsString(heartBeatMsg)); + } catch (JsonProcessingException e) { + logger.warn("dealEngineConnStatusCallbackToAM writeValueAsString failed", e); + } + nodeMetrics.setServiceInstance(engineConnStatusCallbackToAM.serviceInstance()); + 
nodeMetrics.setStatus(metricsConverter.convertStatus(engineConnStatusCallbackToAM.status())); + + nodeMetricManagerPersistence.addOrupdateNodeMetrics(nodeMetrics); + logger.info("Finished to deal engineConnStatusCallbackToAM {}", engineConnStatusCallbackToAM); + } + + private boolean matchRetryLog(String errorMsg) { + boolean flag = false; + if (StringUtils.isNotBlank(errorMsg)) { + String errorMsgLowCase = errorMsg.toLowerCase(Locale.getDefault()); + for (String canRetry : canRetryLogs) { + if (errorMsgLowCase.contains(canRetry)) { + logger.info("match engineConn log fatal logs, is {}", canRetry); + flag = true; + } + } + } + return flag; + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/ECResourceInfoServiceImpl.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/ECResourceInfoServiceImpl.java index e8988fee9b0..02fd4476fc3 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/ECResourceInfoServiceImpl.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/ECResourceInfoServiceImpl.java @@ -19,7 +19,10 @@ import org.apache.linkis.manager.am.restful.EMRestfulApi; import org.apache.linkis.manager.am.service.ECResourceInfoService; +import org.apache.linkis.manager.am.service.em.EMInfoService; import org.apache.linkis.manager.am.util.ECResourceInfoUtils; +import org.apache.linkis.manager.am.utils.AMUtils; +import org.apache.linkis.manager.am.vo.EMNodeVo; import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; import org.apache.linkis.manager.common.entity.persistence.ECResourceInfoRecord; import org.apache.linkis.manager.common.entity.persistence.PersistencerEcNodeInfo; @@ -28,12 +31,16 @@ import 
org.apache.linkis.manager.dao.NodeManagerMapper; import org.apache.linkis.manager.label.service.NodeLabelService; import org.apache.linkis.manager.persistence.LabelManagerPersistence; +import org.apache.linkis.protocol.constants.TaskConstant; +import org.apache.linkis.server.BDPJettyServerHelper; +import org.apache.commons.collections.MapUtils; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +import java.math.BigDecimal; import java.text.SimpleDateFormat; import java.util.*; import java.util.stream.Collectors; @@ -62,6 +69,8 @@ public class ECResourceInfoServiceImpl implements ECResourceInfoService { @Autowired private NodeLabelService nodeLabelService; + @Autowired private EMInfoService emInfoService; + @Override public ECResourceInfoRecord getECResourceInfoRecord(String ticketId) { if (StringUtils.isNotBlank(ticketId)) { @@ -94,14 +103,24 @@ public void deleteECResourceInfoRecord(Integer id) { @Override public List getECResourceInfoRecordList( - String instance, Date endDate, Date startDate, String username, String engineType) { + String instance, + Date endDate, + Date startDate, + String username, + String engineType, + String status) { return ecResourceRecordMapper.getECResourceInfoHistory( - username, instance, endDate, startDate, engineType); + username, instance, endDate, startDate, engineType, status); } @Override public List> getECResourceInfoList( - List creatorUserList, List engineTypeList, List statusStrList) { + List creatorUserList, + List engineTypeList, + List statusStrList, + String queueName, + List ecInstancesList, + Boolean isCrossCluster) { List> resultList = new ArrayList<>(); @@ -113,7 +132,7 @@ public List> getECResourceInfoList( // get engine conn info list filter by creator user list /instance status list List ecNodesInfo = - nodeManagerMapper.getEMNodeInfoList(creatorUserList, statusIntList); + 
nodeManagerMapper.getEMNodeInfoList(creatorUserList, statusIntList, ecInstancesList); // map k:v---> instanceName:PersistencerEcNodeInfo Map persistencerEcNodeInfoMap = @@ -143,8 +162,8 @@ public List> getECResourceInfoList( json.writeValueAsString(ecNodeinfo), new TypeReference>() {}); - Integer intStatus = ecNodeinfo.getInstanceStatus(); - item.put("instanceStatus", NodeStatus.values()[intStatus].name()); + Integer instanceStatus = ecNodeinfo.getInstanceStatus(); + item.put("instanceStatus", NodeStatus.values()[instanceStatus].name()); String usedResourceStr = latestRecord.getUsedResource(); /* @@ -152,18 +171,62 @@ public List> getECResourceInfoList( -> {"driver":{"instance":1,"memory":"2.0 GB","cpu":1} } */ - + long lastUnlockTimestamp = 0L; + if (NodeStatus.values()[instanceStatus].name().equals(NodeStatus.Unlock.name())) { + String heartbeatMsg = ecNodeinfo.getHeartbeatMsg(); + Map heartbeatMap = new HashMap<>(); + if (StringUtils.isNotBlank(heartbeatMsg)) { + heartbeatMap = + BDPJettyServerHelper.gson() + .fromJson(heartbeatMsg, new HashMap<>().getClass()); + } + Object lastUnlockTimestampObject = + heartbeatMap.getOrDefault("lastUnlockTimestamp", 0); + BigDecimal lastUnlockTimestampBigDecimal = + new BigDecimal(String.valueOf(lastUnlockTimestampObject)); + lastUnlockTimestamp = lastUnlockTimestampBigDecimal.longValue(); + } + item.put("lastUnlockTimestamp", lastUnlockTimestamp); item.put("useResource", ECResourceInfoUtils.getStringToMap(usedResourceStr)); - item.put("ecmInstance", latestRecord.getEcmInstance()); - String engineType = latestRecord.getLabelValue().split(",")[1].split("-")[0]; + item.put(TaskConstant.ECM_INSTANCE, latestRecord.getEcmInstance()); + String engineType = latestRecord.getEngineType(); item.put("engineType", engineType); - resultList.add(item); + if (StringUtils.isNotBlank(queueName)) { + Map usedResourceMap = + ECResourceInfoUtils.getStringToMap(usedResourceStr); + Map yarn = MapUtils.getMap(usedResourceMap, "yarn", new HashMap()); 
+ String queueNameStr = String.valueOf(yarn.getOrDefault("queueName", "")); + if (StringUtils.isNotBlank(queueNameStr) && queueName.equals(queueNameStr)) { + resultList.add(item); + } + } else { + resultList.add(item); + } } catch (JsonProcessingException e) { logger.error("Fail to process the ec node info: [{}]", ecNodeinfo, e); } } }); - + if (null != isCrossCluster) { + List> resultListByCluster = new ArrayList<>(); + List emNodeVos = AMUtils.copyToEMVo(emInfoService.getAllEM()); + Map clusterMap = + emNodeVos.stream() + .filter( + s -> s.getLabels().stream().anyMatch(d -> d.getLabelKey().equals("yarnCluster"))) + .collect((Collectors.toMap(EMNodeVo::getInstance, item -> item))); + for (Map stringObjectMap : resultList) { + if (isCrossCluster + && clusterMap.containsKey(stringObjectMap.get(TaskConstant.ECM_INSTANCE).toString())) { + resultListByCluster.add(stringObjectMap); + } else if (!isCrossCluster + && !clusterMap.containsKey(stringObjectMap.get(TaskConstant.ECM_INSTANCE).toString())) { + resultListByCluster.add(stringObjectMap); + } + } + resultList.clear(); + resultList.addAll(resultListByCluster); + } return resultList; } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java index ab926df64e4..85c7470ce5b 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java @@ -61,7 +61,7 @@ public static ResourceVo getStringToMap(String str, ECResourceInfoRecord info) { Map divermap = MapUtils.getMap(map, "driver"); resourceVo.setInstance(((Double) 
divermap.get("instance")).intValue()); resourceVo.setCores(((Double) divermap.get("cpu")).intValue()); - String memoryStr = String.valueOf(map.getOrDefault("memory", "0k")); + String memoryStr = String.valueOf(divermap.getOrDefault("memory", "0k")); long memorylong = 0; if (!getScientific(memoryStr)) { memorylong = ByteTimeUtils.byteStringAsBytes(memoryStr); @@ -125,6 +125,7 @@ public static AMEngineNode convertECInfoTOECNode(ECResourceInfoRecord ecInfo) { engineNode.setTicketId(ecInfo.getTicketId()); engineNode.setStartTime(ecInfo.getCreateTime()); engineNode.setUpdateTime(ecInfo.getReleaseTime()); + engineNode.setEcMetrics(ecInfo.getMetrics()); return engineNode; } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/EMUtils.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/EMUtils.java new file mode 100644 index 00000000000..61d34a473d8 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/EMUtils.java @@ -0,0 +1,123 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.util; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.governance.common.protocol.conf.TenantRequest; +import org.apache.linkis.governance.common.protocol.conf.TenantResponse; +import org.apache.linkis.manager.am.vo.ConfigVo; +import org.apache.linkis.rpc.Sender; + +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; + +import java.io.IOException; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class EMUtils { + + private static Logger logger = LoggerFactory.getLogger(EMUtils.class); + + public static String getTenant(String username, String creator) { + Sender sender = + Sender.getSender( + Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME().getValue()); + TenantResponse response = (TenantResponse) sender.ask(new TenantRequest(username, creator)); + if (StringUtils.isBlank(response.tenant())) { + response = (TenantResponse) sender.ask(new TenantRequest(username, "*")); + if (StringUtils.isBlank(response.tenant())) { + response = (TenantResponse) sender.ask(new TenantRequest("*", creator)); + } + } + return response.tenant(); + } + + public static List getUserConf(String username, String creator, String engineType) { + // 获取用户配置信息 + List configlist = new ArrayList<>(); + try { + String url = + MessageFormat.format( + "/api/rest_j/v1/configuration/getFullTreesByAppName?creator={0}&engineType={1}", + creator, engineType); + HttpGet httpGet = new HttpGet(Configuration.getGateWayURL() + url); + httpGet.addHeader("Token-User", username); + 
httpGet.addHeader("Token-Code", Configuration.LINKIS_TOKEN().getValue()); + String responseStr = + EntityUtils.toString(HttpClients.createDefault().execute(httpGet).getEntity()); + JsonNode fullTree = new ObjectMapper().readTree(responseStr).get("data").get("fullTree"); + for (JsonNode node : fullTree) { + JsonNode settingsList = node.get("settings"); + for (JsonNode key : settingsList) { + configlist.add( + new ConfigVo( + key.get("key").asText(), + key.get("defaultValue").asText(), + key.get("configValue").asText())); + } + } + } catch (IOException e) { + logger.error("获取用户配置信息失败(Failed to obtain user configuration information)"); + } + return configlist.stream() + .filter( + confInfo -> { + String configValue = confInfo.getConfigValue(); + String defaultValue = confInfo.getDefaultValue(); + return (StringUtils.isNotBlank(configValue) && !configValue.equals("null")) + || (StringUtils.isNotBlank(defaultValue) && !defaultValue.equals("null")); + }) + .collect(Collectors.toList()); + } + + public static String getConfValue(List configVoList, String confKey) { + String confValue = "0"; + for (ConfigVo configVo : configVoList) { + if (configVo.getKey().equals(confKey)) { + confValue = configVo.getConfigValue(); + if (StringUtils.isBlank(confValue)) { + confValue = configVo.getDefaultValue(); + } + } + } + return removeUnit(confValue); + } + + public static String removeUnit(String input) { + // 使用正则表达式匹配数字和单位,然后仅保留数字部分 + Pattern pattern = Pattern.compile("(\\d+)([gG])"); + Matcher matcher = pattern.matcher(input); + if (matcher.find()) { + return matcher.group(1); + } else { + return input; + } + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/AMEngineNodeVo.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/AMEngineNodeVo.java index 52ea0108853..d7208f211ab 100644 --- 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/AMEngineNodeVo.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/AMEngineNodeVo.java @@ -129,6 +129,7 @@ public void setInstance(String instance) { } public ResourceType getResourceType() { + return resourceType; } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/CanCreateECRes.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/CanCreateECRes.java new file mode 100644 index 00000000000..a6c9594bc1b --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/CanCreateECRes.java @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.vo; + +import java.util.Map; + +public class CanCreateECRes { + + private boolean canCreateEC = true; + + /** Need to give the reason when canCreateEc set false */ + private String reason; + + private String labelResource; + + private String ecmResource; + + private String requestResource; + + private String yarnResource; + + private Map labels; + + public boolean isCanCreateEC() { + return canCreateEC; + } + + public void setCanCreateEC(boolean canCreateEC) { + this.canCreateEC = canCreateEC; + } + + public String getReason() { + return reason; + } + + public void setReason(String reason) { + this.reason = reason; + } + + public String getLabelResource() { + return labelResource; + } + + public void setLabelResource(String labelResource) { + this.labelResource = labelResource; + } + + public String getEcmResource() { + return ecmResource; + } + + public void setEcmResource(String ecmResource) { + this.ecmResource = ecmResource; + } + + public String getRequestResource() { + return requestResource; + } + + public void setRequestResource(String requestResource) { + this.requestResource = requestResource; + } + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } + + public String getYarnResource() { + return yarnResource; + } + + public void setYarnResource(String yarnResource) { + this.yarnResource = yarnResource; + } + + @Override + public String toString() { + return "CanCreateECRes{" + "canCreateEC=" + canCreateEC + ", reason='" + reason + '\'' + '}'; + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/ConfigVo.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/ConfigVo.java new file mode 100644 index 00000000000..fe89e276f06 --- /dev/null +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/ConfigVo.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.vo; + +public class ConfigVo { + + private String key; + private String defaultValue; + private String configValue; + + public ConfigVo(String key, String defaultValue, String configValue) { + this.key = key; + this.defaultValue = defaultValue; + this.configValue = configValue; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + @Override + public String toString() { + return "ConfigVo{" + + "key='" + + key + + '\'' + + ", defaultValue='" + + defaultValue + + '\'' + + ", configValue='" + + configValue + + '\'' + + '}'; + } +} diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/EMNodeVo.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/EMNodeVo.java index d08614a2174..5e801f59238 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/EMNodeVo.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/EMNodeVo.java @@ -71,6 +71,8 @@ public class EMNodeVo { private Date startTime; + private String description; + public List